diff --git a/Dockerfile b/Dockerfile
index 4b6df2a0c121..2528d200ba7e 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -7,14 +7,25 @@ RUN npm run build
FROM python:3.9-alpine
-RUN apk add --no-cache libxml2-dev libxslt-dev libxml2 caddy libffi-dev
-ENV ENV prod
+
+RUN apk add --no-cache libxml2-dev \
+ libxslt-dev \
+ libxml2 caddy \
+ libffi-dev \
+ python3 \
+ python3-dev \
+ jpeg-dev \
+ lcms2-dev \
+ openjpeg-dev \
+ zlib-dev
+
+
+ENV ENV True
EXPOSE 80
WORKDIR /app/
COPY ./pyproject.toml /app/
-
RUN apk add --update --no-cache --virtual .build-deps \
curl \
g++ \
diff --git a/Dockerfile.dev b/Dockerfile.dev
index 8f3d38ffa919..bce6c5ea0cfc 100644
--- a/Dockerfile.dev
+++ b/Dockerfile.dev
@@ -12,10 +12,11 @@ RUN curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-
poetry config virtualenvs.create false
# Copy poetry.lock* in case it doesn't exist in the repo
-COPY ./pyproject.toml ./poetry.lock* /app/
-
-RUN poetry install
+COPY ./pyproject.toml /app/
COPY ./mealie /app/mealie
-CMD ["uvicorn", "mealie.app:app", "--host", "0.0.0.0", "--port", "9000", "--reload"]
\ No newline at end of file
+RUN poetry install
+
+RUN chmod +x /app/mealie/run.sh
+CMD ["/app/mealie/run.sh", "reload"]
diff --git a/dev/data/templates/recipes.md b/dev/data/templates/recipes.md
index 84a404798444..6ee2d4bb6972 100644
--- a/dev/data/templates/recipes.md
+++ b/dev/data/templates/recipes.md
@@ -1,6 +1,6 @@
-
+
# {{ recipe.name }}
{{ recipe.description }}
diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml
index eb1b33251ff1..6e1aac0a9a11 100644
--- a/docker-compose.dev.yml
+++ b/docker-compose.dev.yml
@@ -29,8 +29,8 @@ services:
db_type: sqlite
TZ: America/Anchorage # Specify Correct Timezone for Date/Time to line up correctly.
volumes:
- - ./app_data:/app_data
- - ./mealie:/app
+ - ./dev/data:/app/dev/data
+ - ./mealie:/app/mealie
# Mkdocs
mealie-docs:
diff --git a/docs/docs/changelog/v0.4.1.md b/docs/docs/changelog/v0.4.1.md
new file mode 100644
index 000000000000..9ab671f6afc7
--- /dev/null
+++ b/docs/docs/changelog/v0.4.1.md
@@ -0,0 +1,35 @@
+# v0.4.1
+
+**App Version: v0.4.1**
+
+**Database Version: v0.4.0**
+
+!!! error "Breaking Changes"
+
+ #### Recipe Images
+ While it *shouldn't* be a breaking change, I feel it is important to note that you may experience issues with the new image migration. Recipe images are now minified; this is done on start-up, import, migration, and when a new recipe is created. The initial boot or load may be a bit slow if you have lots of recipes, but you likely won't notice. What you may notice is that if your recipe slug and the image name do not match, you will encounter issues with your images showing up. This can be resolved by finding the image directory and renaming it to the appropriate slug (see the sketch just below this callout). I did fix multiple edge cases, but it is likely more exist. As always, make a backup before you update!
+
+ On the plus side, this comes with a huge performance increase! 🎉
+
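+If you hit the slug/image mismatch described above, the following is a minimal sketch of how a mismatched image directory could be renamed to the recipe slug. The `img` path and the example names are assumptions; adjust them to your own data volume layout.
+
+```python
+from pathlib import Path
+
+# Assumption: recipe images live in the data volume's img directory,
+# one folder per recipe, and each folder should be named after the recipe slug.
+img_dir = Path("/app/data/img")
+
+old_name = "My Old Recipe"   # folder name that no longer matches the slug
+new_slug = "my-old-recipe"   # slug as it appears in the recipe URL
+
+src = img_dir / old_name
+dest = img_dir / new_slug
+
+if src.is_dir() and not dest.exists():
+    src.rename(dest)  # rename the folder so the images match the slug again
+```
+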
+- Add markdown support for ingredients - Resolves #32
+- Ingredients editor improvements
+- Fix Tags/Categories render problems on recipes
+- Tags redirect to new tag pages
+- Categories redirect to category pages
+- Fix Backup download blocked by authentication
+- The random meal-planner will no longer duplicate recipes unless there are no other options
+- New Quick Week button to generate the next 5-day week of recipe slots.
+- Minor UI tweaks
+- Recipe Cards now display 2 recipe tags
+- Recipe images are now minified. This comes with a serious performance improvement. On initial startup you may experience some delays; images are migrated to the new structure on startup, and depending on the size of your database this can take some time.
+ - Note that original images are still kept for large displays like on the individual recipe pages.
+ - A smaller image is used for recipe cards
+ - A 'tiny' image is used for search results.
+- Advanced Search Page. You can now use the search page to filter recipes by including/excluding tags and categories, as well as selecting And/Or matching criteria.
+- Added link to advanced search on quick search
+- Better support for NextCloud imports
+ - Translate keywords to tags
+ - Fix rollback on failure
+- Recipe Tag/Category Input components have been unified and now share a single way to interact. To add a new category in the recipe editor, click the '+' icon next to the input and fill out the form. The same applies to adding a Tag.
+
+
diff --git a/docs/docs/getting-started/api-usage.md b/docs/docs/getting-started/api-usage.md
index 4964ae6edbbe..b58fc485fa90 100644
--- a/docs/docs/getting-started/api-usage.md
+++ b/docs/docs/getting-started/api-usage.md
@@ -10,5 +10,94 @@ For example you could add `{"message": "Remember to thaw the chicken"}` to a rec
## Examples
+### Bulk import
+Recipes can be imported in bulk from a file containing a list of URLs. This can be done with either of the following Bash or Python scripts, where the `list` file contains one URL per line.
-Have Ideas? Submit a PR!
\ No newline at end of file
+#### Bash
+```bash
+#!/bin/bash
+
+function authentification () {
+ auth=$(curl -X 'POST' \
+ "$3/api/auth/token" \
+ -H 'accept: application/json' \
+ -H 'Content-Type: application/x-www-form-urlencoded' \
+ -d 'grant_type=&username='$1'&password='$2'&scope=&client_id=&client_secret=')
+
+ echo $auth | sed -e 's/.*token":"\(.*\)",.*/\1/'
+}
+
+function import_from_file () {
+ while IFS= read -r line
+ do
+ echo $line
+ curl -X 'POST' \
+ "$3/api/recipes/create-url" \
+ -H "Authorization: Bearer $2" \
+ -H 'accept: application/json' \
+ -H 'Content-Type: application/json' \
+ -d '{"url": "'$line'" }'
+ echo
+ done < "$1"
+}
+
+input="list"
+mail="changeme@email.com"
+password="MyPassword"
+mealie_url=http://localhost:9000
+
+
+token=$(authentification $mail $password $mealie_url)
+import_from_file $input $token $mealie_url
+
+```
+
+#### Python
+```python
+import requests
+import re
+
+def authentification(mail, password, mealie_url):
+ headers = {
+ 'accept': 'application/json',
+ 'Content-Type': 'application/x-www-form-urlencoded',
+ }
+ data = {
+ 'grant_type': '',
+ 'username': mail,
+ 'password': password,
+ 'scope': '',
+ 'client_id': '',
+ 'client_secret': ''
+ }
+ auth = requests.post(mealie_url + "/api/auth/token", headers=headers, data=data)
+ token = re.sub(r'.*token":"(.*)",.*', r'\1', auth.text)
+ return token
+
+def import_from_file(input_file, token, mealie_url):
+ with open(input_file) as fp:
+ for l in fp:
+ line = re.sub(r'(.*)\n', r'\1', l)
+ print(line)
+ headers = {
+ 'Authorization': "Bearer " + token,
+ 'accept': 'application/json',
+ 'Content-Type': 'application/json'
+ }
+ data = {
+ 'url': line
+ }
+ response = requests.post(mealie_url + "/api/recipes/create-url", headers=headers, json=data)
+ print(response.text)
+
+input_file="list"
+mail="changeme@email.com"
+password="MyPassword"
+mealie_url="http://localhost:9000"
+
+
+token = authentification(mail, password, mealie_url)
+import_from_file(input_file, token, mealie_url)
+```
+
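+### Recipe images
+Recipe images can also be fetched through the API. The image endpoint now accepts an `image_type` query parameter of `original`, `small`, or `tiny`. Below is a minimal sketch that assumes a local instance and an existing recipe slug; depending on your instance settings you may also need to send the same `Authorization` header used above.
+
+```python
+import requests
+
+mealie_url = "http://localhost:9000"   # assumption: local instance
+recipe_slug = "my-recipe"              # assumption: slug of an existing recipe
+
+# image_type can be "original", "small", or "tiny"
+response = requests.get(
+    f"{mealie_url}/api/recipes/{recipe_slug}/image",
+    params={"image_type": "tiny"},
+)
+
+if response.ok:
+    # The file extension is an assumption; use whatever matches your recipe image.
+    with open(f"{recipe_slug}.jpg", "wb") as f:
+        f.write(response.content)
+else:
+    print(response.status_code, response.text)
+```
+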
+Have Ideas? Submit a PR!
diff --git a/docs/docs/getting-started/install.md b/docs/docs/getting-started/install.md
index 3f2aaf113fe3..737d20e48228 100644
--- a/docs/docs/getting-started/install.md
+++ b/docs/docs/getting-started/install.md
@@ -23,6 +23,11 @@ docker run \
```
+!!! tip "Default Credentials"
+ **Username:** changeme@email.com
+
+ **Password:** MyPassword
+
## Docker Compose with SQLite
Deployment with docker-compose is the recommended method for deployment. The example below will create an instance of mealie available on port `9925` with the data volume mounted from the local directory. To use, create a docker-compose.yml file, paste the contents below and save. In the terminal run `docker-compose up -d` to start the container.
diff --git a/docs/docs/overrides/api.html b/docs/docs/overrides/api.html
index 5249b0d34e31..f679a924c9b4 100644
--- a/docs/docs/overrides/api.html
+++ b/docs/docs/overrides/api.html
@@ -14,7 +14,7 @@
diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml
index 080c3ca3d54c..7d675172a133 100644
--- a/docs/mkdocs.yml
+++ b/docs/mkdocs.yml
@@ -74,6 +74,7 @@ nav:
- Guidelines: "contributors/developers-guide/general-guidelines.md"
- Development Road Map: "roadmap.md"
- Change Log:
+ - v0.4.1 Frontend/UI: "changelog/v0.4.1.md"
- v0.4.0 Authentication: "changelog/v0.4.0.md"
- v0.3.0 Improvements: "changelog/v0.3.0.md"
- v0.2.0 Now With Tests!: "changelog/v0.2.0.md"
diff --git a/frontend/src/App.vue b/frontend/src/App.vue
index 69b2e3c0f3c1..bb545577b9a9 100644
--- a/frontend/src/App.vue
+++ b/frontend/src/App.vue
@@ -1,35 +1,6 @@
-
-
-
- mdi-silverware-variant
-
-
-
-
- Mealie
-
-
-
-
-
-
-
-
- mdi-magnify
-
-
-
-
-
+
@@ -47,34 +18,25 @@
@@ -160,6 +113,7 @@ export default {
.notify-base {
color: white !important;
+ /* min-height: 50px; */
margin-right: 60px;
margin-bottom: -5px;
opacity: 0.9 !important;
@@ -176,11 +130,4 @@ export default {
*::-webkit-scrollbar-thumb {
background: grey;
}
-
-.notify-base {
- color: white !important;
- margin-right: 60px;
- margin-bottom: -5px;
- opacity: 0.9 !important;
-}
diff --git a/frontend/src/api/category.js b/frontend/src/api/category.js
index ef582c580478..f60739c8dc61 100644
--- a/frontend/src/api/category.js
+++ b/frontend/src/api/category.js
@@ -5,27 +5,27 @@ import { store } from "@/store";
const prefix = baseURL + "categories";
const categoryURLs = {
- get_all: `${prefix}`,
- get_category: category => `${prefix}/${category}`,
- delete_category: category => `${prefix}/${category}`,
+ getAll: `${prefix}`,
+ getCategory: category => `${prefix}/${category}`,
+ deleteCategory: category => `${prefix}/${category}`,
};
export const categoryAPI = {
async getAll() {
- let response = await apiReq.get(categoryURLs.get_all);
+ let response = await apiReq.get(categoryURLs.getAll);
return response.data;
},
async create(name) {
- let response = await apiReq.post(categoryURLs.get_all, { name: name });
+ let response = await apiReq.post(categoryURLs.getAll, { name: name });
store.dispatch("requestCategories");
return response.data;
},
async getRecipesInCategory(category) {
- let response = await apiReq.get(categoryURLs.get_category(category));
+ let response = await apiReq.get(categoryURLs.getCategory(category));
return response.data;
},
async delete(category) {
- let response = await apiReq.delete(categoryURLs.delete_category(category));
+ let response = await apiReq.delete(categoryURLs.deleteCategory(category));
store.dispatch("requestCategories");
return response.data;
},
@@ -44,6 +44,11 @@ export const tagAPI = {
let response = await apiReq.get(tagURLs.getAll);
return response.data;
},
+ async create(name) {
+ let response = await apiReq.post(tagURLs.getAll, { name: name });
+ store.dispatch("requestTags");
+ return response.data;
+ },
async getRecipesInTag(tag) {
let response = await apiReq.get(tagURLs.getTag(tag));
return response.data;
diff --git a/frontend/src/api/meta.js b/frontend/src/api/meta.js
index bcf6470fcf99..59183c0c5ea1 100644
--- a/frontend/src/api/meta.js
+++ b/frontend/src/api/meta.js
@@ -5,15 +5,22 @@ const prefix = baseURL + "debug";
const debugURLs = {
version: `${prefix}/version`,
+ debug: `${prefix}`,
lastRecipe: `${prefix}/last-recipe-json`,
demo: `${prefix}/is-demo`,
};
export const metaAPI = {
- async get_version() {
+ async getAppInfo() {
let response = await apiReq.get(debugURLs.version);
return response.data;
},
+
+ async getDebugInfo() {
+ const response = await apiReq.get(debugURLs.debug);
+ return response.data;
+ },
+
async getLastJson() {
let response = await apiReq.get(debugURLs.lastRecipe);
return response.data;
@@ -21,7 +28,6 @@ export const metaAPI = {
async getIsDemo() {
let response = await apiReq.get(debugURLs.demo);
- console.log(response);
return response.data;
},
};
diff --git a/frontend/src/api/recipe.js b/frontend/src/api/recipe.js
index 97a4b08c4713..50e45e593365 100644
--- a/frontend/src/api/recipe.js
+++ b/frontend/src/api/recipe.js
@@ -8,6 +8,7 @@ const prefix = baseURL + "recipes/";
const recipeURLs = {
allRecipes: baseURL + "recipes",
+ summary: baseURL + "recipes" + "/summary",
allRecipesByCategory: prefix + "category",
create: prefix + "create",
createByURL: prefix + "create-url",
@@ -56,9 +57,7 @@ export const recipeAPI = {
const fd = new FormData();
fd.append("image", fileObject);
fd.append("extension", fileObject.name.split(".").pop());
-
let response = apiReq.put(recipeURLs.updateImage(recipeSlug), fd);
-
return response;
},
@@ -87,4 +86,21 @@ export const recipeAPI = {
return response.data;
},
+
+ async allSummary() {
+ const response = await apiReq.get(recipeURLs.summary);
+ return response.data;
+ },
+
+ recipeImage(recipeSlug) {
+ return `/api/recipes/${recipeSlug}/image?image_type=original`;
+ },
+
+ recipeSmallImage(recipeSlug) {
+ return `/api/recipes/${recipeSlug}/image?image_type=small`;
+ },
+
+ recipeTinyImage(recipeSlug) {
+ return `/api/recipes/${recipeSlug}/image?image_type=tiny`;
+ },
};
diff --git a/frontend/src/components/Admin/AdminSidebar.vue b/frontend/src/components/Admin/AdminSidebar.vue
index 5e719307b23d..f09070492e02 100644
--- a/frontend/src/components/Admin/AdminSidebar.vue
+++ b/frontend/src/components/Admin/AdminSidebar.vue
@@ -74,7 +74,7 @@
-
+
mdi-information
@@ -83,10 +83,11 @@
{{ $t("settings.current") }}
- {{ version }}
+ {{ appVersion }}
-
+
{{ $t("general.download") }}
@@ -61,6 +66,7 @@
diff --git a/frontend/src/components/Admin/General/CreatePageDialog.vue b/frontend/src/components/Admin/General/CreatePageDialog.vue
index efedf8cf015b..57f6830ed2f3 100644
--- a/frontend/src/components/Admin/General/CreatePageDialog.vue
+++ b/frontend/src/components/Admin/General/CreatePageDialog.vue
@@ -19,10 +19,11 @@
v-model="page.name"
label="Page Name"
>
-
@@ -43,10 +44,10 @@
\ No newline at end of file
diff --git a/frontend/src/components/Admin/Migration/MigrationCard.vue b/frontend/src/components/Admin/Migration/MigrationCard.vue
index 7f3ef7ed4886..8dad1af08f5a 100644
--- a/frontend/src/components/Admin/Migration/MigrationCard.vue
+++ b/frontend/src/components/Admin/Migration/MigrationCard.vue
@@ -40,7 +40,13 @@
{{ $t("general.delete") }}
-
+
{{ $t("general.import") }}
@@ -82,10 +88,10 @@ export default {
this.$emit("refresh");
},
async importMigration(file_name) {
- this.loading == true;
+ this.loading = true;
let response = await api.migrations.import(this.folder, file_name);
this.$emit("imported", response.successful, response.failed);
- this.loading == false;
+ this.loading = false;
},
readableTime(timestamp) {
let date = new Date(timestamp);
diff --git a/frontend/src/components/FormHelpers/CategorySelector.vue b/frontend/src/components/FormHelpers/CategorySelector.vue
deleted file mode 100644
index 390bd60d6043..000000000000
--- a/frontend/src/components/FormHelpers/CategorySelector.vue
+++ /dev/null
@@ -1,50 +0,0 @@
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/frontend/src/components/FormHelpers/CategoryTagSelector.vue b/frontend/src/components/FormHelpers/CategoryTagSelector.vue
new file mode 100644
index 000000000000..4abdffe1f10c
--- /dev/null
+++ b/frontend/src/components/FormHelpers/CategoryTagSelector.vue
@@ -0,0 +1,129 @@
+
+
+
+
+ {{ data.item.name || data.item }}
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/frontend/src/components/Login/LoginForm.vue b/frontend/src/components/Login/LoginForm.vue
index b2cc18981dc7..1cb48b729e26 100644
--- a/frontend/src/components/Login/LoginForm.vue
+++ b/frontend/src/components/Login/LoginForm.vue
@@ -22,13 +22,11 @@
diff --git a/frontend/src/components/Recipe/MobileRecipeCard.vue b/frontend/src/components/Recipe/MobileRecipeCard.vue
index 9b436c076022..c0df606bf5d4 100644
--- a/frontend/src/components/Recipe/MobileRecipeCard.vue
+++ b/frontend/src/components/Recipe/MobileRecipeCard.vue
@@ -1,8 +1,13 @@
-
+
-
+
@@ -20,7 +25,7 @@
-
\ No newline at end of file
diff --git a/frontend/src/components/Recipe/RecipeViewer/RecipeChips.vue b/frontend/src/components/Recipe/RecipeViewer/RecipeChips.vue
index 825f2e8d9732..de8d9a4b4354 100644
--- a/frontend/src/components/Recipe/RecipeViewer/RecipeChips.vue
+++ b/frontend/src/components/Recipe/RecipeViewer/RecipeChips.vue
@@ -1,13 +1,14 @@
-
-
{{ title }}
+
+
{{ title }}
{{ category }}
@@ -18,21 +19,43 @@
-
\ No newline at end of file
diff --git a/frontend/src/components/UI/TheAppBar.vue b/frontend/src/components/UI/TheAppBar.vue
new file mode 100644
index 000000000000..1c556feb6a00
--- /dev/null
+++ b/frontend/src/components/UI/TheAppBar.vue
@@ -0,0 +1,114 @@
+
+
+
+
+
+ mdi-silverware-variant
+
+
+
+
+ Mealie
+
+
+
+
+
+
+
+
+ mdi-magnify
+
+
+
+
+
+
+
+ mdi-silverware-variant
+
+
+
+
+ Mealie
+
+
+
+
+
+
+
+
+ mdi-magnify
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/frontend/src/mixins/utilMixins.js b/frontend/src/mixins/utilMixins.js
new file mode 100644
index 000000000000..2b9bf8504264
--- /dev/null
+++ b/frontend/src/mixins/utilMixins.js
@@ -0,0 +1,7 @@
+export const utilMixins = {
+ computed: {
+ isMobile() {
+ return this.$vuetify.breakpoint.name === "xs";
+ },
+ },
+};
diff --git a/frontend/src/mixins/validators.js b/frontend/src/mixins/validators.js
index 82a7e952611c..4ba7e6eabbd2 100644
--- a/frontend/src/mixins/validators.js
+++ b/frontend/src/mixins/validators.js
@@ -3,7 +3,7 @@ export const validators = {
return {
emailRule: v =>
!v ||
- /^\w+([.-]?\w+)*@\w+([.-]?\w+)*(\.\w{2,3})+$/.test(v) ||
+ /^[^@\s]+@[^@\s.]+\.[^@.\s]+$/.test(v) ||
this.$t('user.e-mail-must-be-valid'),
existsRule: value => !!value || this.$t('general.field-required'),
diff --git a/frontend/src/pages/Admin/About/index.vue b/frontend/src/pages/Admin/About/index.vue
new file mode 100644
index 000000000000..453520b1a866
--- /dev/null
+++ b/frontend/src/pages/Admin/About/index.vue
@@ -0,0 +1,91 @@
+
+
+
+
+ About Mealie
+
+
+
+
+
+
+ {{ property.icon || "mdi-account" }}
+
+
+
+ {{ property.name }}
+ {{ property.value }}
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/frontend/src/pages/Admin/MealPlanner/index.vue b/frontend/src/pages/Admin/MealPlanner/index.vue
index a1b953430890..5fa85c72cde3 100644
--- a/frontend/src/pages/Admin/MealPlanner/index.vue
+++ b/frontend/src/pages/Admin/MealPlanner/index.vue
@@ -7,41 +7,19 @@
{{ $t("recipe.categories") }}
-
-
-
-
-
- {{ data.item.name }}
-
-
-
-
-
+
@@ -57,28 +35,23 @@
{{ groupSettings.webhookTime }}
-
-
-
-
-
-
-
-
-
- mdi-webhook
- {{ $t("settings.webhooks.test-webhooks") }}
-
-
+
+
+
+
+ mdi-webhook
+ {{ $t("settings.webhooks.test-webhooks") }}
+
@@ -110,9 +83,11 @@
diff --git a/frontend/src/pages/HomePage.vue b/frontend/src/pages/HomePage.vue
index ac6b06a6f09b..a57a2b0a23ae 100644
--- a/frontend/src/pages/HomePage.vue
+++ b/frontend/src/pages/HomePage.vue
@@ -36,7 +36,6 @@ export default {
},
computed: {
siteSettings() {
- console.log(this.$store.getters.getSiteSettings);
return this.$store.getters.getSiteSettings;
},
recentRecipes() {
@@ -54,7 +53,6 @@ export default {
this.siteSettings.categories.forEach(async element => {
let recipes = await this.getRecipeByCategory(element.slug);
if (recipes.recipes.length < 0) recipes.recipes = [];
- console.log(recipes);
this.recipeByCategory.push(recipes);
});
},
diff --git a/frontend/src/pages/MealPlan/Planner.vue b/frontend/src/pages/MealPlan/Planner.vue
index 57df248a3835..2d472216d244 100644
--- a/frontend/src/pages/MealPlan/Planner.vue
+++ b/frontend/src/pages/MealPlan/Planner.vue
@@ -117,7 +117,7 @@ export default {
return utils.getDateAsTextAlt(dateObject);
},
getImage(image) {
- return utils.getImageURL(image);
+ return api.recipes.recipeTinyImage(image);
},
editPlan(id) {
diff --git a/frontend/src/pages/MealPlan/ThisWeek.vue b/frontend/src/pages/MealPlan/ThisWeek.vue
index a361ba0e16e1..e8279d319f80 100644
--- a/frontend/src/pages/MealPlan/ThisWeek.vue
+++ b/frontend/src/pages/MealPlan/ThisWeek.vue
@@ -52,7 +52,6 @@
+
+
\ No newline at end of file
diff --git a/frontend/src/pages/SearchPage.vue b/frontend/src/pages/SearchPage.vue
deleted file mode 100644
index 77f6b6d66551..000000000000
--- a/frontend/src/pages/SearchPage.vue
+++ /dev/null
@@ -1,58 +0,0 @@
-
-
-
-
-
-
-
-
-
- mdi-filter
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/frontend/src/pages/SearchPage/FilterSelector.vue b/frontend/src/pages/SearchPage/FilterSelector.vue
new file mode 100644
index 000000000000..c46649e51d2b
--- /dev/null
+++ b/frontend/src/pages/SearchPage/FilterSelector.vue
@@ -0,0 +1,69 @@
+
+
+
+
+ Include
+
+
+
+ Exclude
+
+
+
+
+
+ And
+
+
+ Or
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/frontend/src/pages/SearchPage/index.vue b/frontend/src/pages/SearchPage/index.vue
new file mode 100644
index 000000000000..697f21de0238
--- /dev/null
+++ b/frontend/src/pages/SearchPage/index.vue
@@ -0,0 +1,178 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Category Filter
+
+
+
+
+ Tag Filter
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/frontend/src/routes/admin.js b/frontend/src/routes/admin.js
index e7637007bfe6..e52927db249d 100644
--- a/frontend/src/routes/admin.js
+++ b/frontend/src/routes/admin.js
@@ -6,6 +6,7 @@ import Migration from "@/pages/Admin/Migration";
import Profile from "@/pages/Admin/Profile";
import ManageUsers from "@/pages/Admin/ManageUsers";
import Settings from "@/pages/Admin/Settings";
+import About from "@/pages/Admin/About";
import { store } from "../store";
export default {
@@ -50,5 +51,9 @@ export default {
path: "settings",
component: Settings,
},
+ {
+ path: "about",
+ component: About,
+ },
],
};
diff --git a/frontend/src/routes/index.js b/frontend/src/routes/index.js
index cca7aa276493..af58a187d4dd 100644
--- a/frontend/src/routes/index.js
+++ b/frontend/src/routes/index.js
@@ -6,6 +6,7 @@ import NewRecipe from "@/pages/Recipe/NewRecipe";
import CustomPage from "@/pages/Recipes/CustomPage";
import AllRecipes from "@/pages/Recipes/AllRecipes";
import CategoryPage from "@/pages/Recipes/CategoryPage";
+import TagPage from "@/pages/Recipes/TagPage";
import Planner from "@/pages/MealPlan/Planner";
import Debug from "@/pages/Debug";
import LoginPage from "@/pages/LoginPage";
@@ -33,7 +34,8 @@ export const routes = [
{ path: "/search", component: SearchPage },
{ path: "/recipes/all", component: AllRecipes },
{ path: "/pages/:customPage", component: CustomPage },
- { path: "/recipes/:category", component: CategoryPage },
+ { path: "/recipes/tag/:tag", component: TagPage },
+ { path: "/recipes/category/:category", component: CategoryPage },
{ path: "/recipe/:recipe", component: ViewRecipe },
{ path: "/new/", component: NewRecipe },
{ path: "/meal-plan/planner", component: Planner },
diff --git a/frontend/src/store/index.js b/frontend/src/store/index.js
index 09c07cdd2148..f8f17282e3b8 100644
--- a/frontend/src/store/index.js
+++ b/frontend/src/store/index.js
@@ -28,6 +28,10 @@ const store = new Vuex.Store({
mealPlanCategories: [],
allCategories: [],
allTags: [],
+ appInfo: {
+ version: "",
+ demoStatus: false,
+ },
},
mutations: {
@@ -43,19 +47,22 @@ const store = new Vuex.Store({
setAllTags(state, payload) {
state.allTags = payload;
},
+ setAppInfo(state, payload) {
+ state.appInfo = payload;
+ },
},
actions: {
async requestRecentRecipes() {
- const keys = [
- "name",
- "slug",
- "image",
- "description",
- "dateAdded",
- "rating",
- ];
- const payload = await api.recipes.allByKeys(keys);
+ // const keys = [
+ // "name",
+ // "slug",
+ // "image",
+ // "description",
+ // "dateAdded",
+ // "rating",
+ // ];
+ const payload = await api.recipes.allSummary();
this.commit("setRecentRecipes", payload);
},
@@ -67,6 +74,11 @@ const store = new Vuex.Store({
const tags = await api.tags.getAll();
commit("setAllTags", tags);
},
+
+ async requestAppInfo({ commit }) {
+ const response = await api.meta.getAppInfo();
+ commit("setAppInfo", response);
+ },
},
getters: {
@@ -76,6 +88,7 @@ const store = new Vuex.Store({
state.allCategories.sort((a, b) => (a.slug > b.slug ? 1 : -1)),
getAllTags: state =>
state.allTags.sort((a, b) => (a.slug > b.slug ? 1 : -1)),
+ getAppInfo: state => state.appInfo,
},
});
diff --git a/frontend/src/store/modules/userSettings.js b/frontend/src/store/modules/userSettings.js
index 352c89c43438..7c01b12e1174 100644
--- a/frontend/src/store/modules/userSettings.js
+++ b/frontend/src/store/modules/userSettings.js
@@ -70,7 +70,7 @@ const actions = {
async refreshToken({ commit, getters }) {
if (!getters.getIsLoggedIn) {
- commit("setIsLoggedIn", false); // This is to be here... for some reasons? ¯\_(ツ)_/¯
+ commit("setIsLoggedIn", false); // This has to be here... for some reasons? ¯\_(ツ)_/¯
console.log("Not Logged In");
return;
}
diff --git a/frontend/src/utils/index.js b/frontend/src/utils/index.js
index 925f94534258..e292d90624b4 100644
--- a/frontend/src/utils/index.js
+++ b/frontend/src/utils/index.js
@@ -50,7 +50,7 @@ const monthsShort = [
export default {
getImageURL(image) {
- return `/api/recipes/${image}/image`;
+ return `/api/recipes/${image}/image?image_type=small`;
},
generateUniqueKey(item, index) {
const uniqueKey = `${item}-${index}`;
diff --git a/makefile b/makefile
index 4eba5010f2e1..595fe19996ec 100644
--- a/makefile
+++ b/makefile
@@ -54,6 +54,7 @@ setup: ## Setup Development Instance
backend: ## Start Mealie Backend Development Server
poetry run python mealie/db/init_db.py && \
+ poetry run python mealie/services/image/minify.py && \
poetry run python mealie/app.py
diff --git a/mealie/app.py b/mealie/app.py
index dc664f961316..e6223d50380f 100644
--- a/mealie/app.py
+++ b/mealie/app.py
@@ -51,7 +51,7 @@ start_scheduler()
def main():
-
+
uvicorn.run(
"app:app",
host="0.0.0.0",
diff --git a/mealie/core/config.py b/mealie/core/config.py
index aca765dfff10..b876ae9bb93b 100644
--- a/mealie/core/config.py
+++ b/mealie/core/config.py
@@ -1,18 +1,18 @@
import os
import secrets
from pathlib import Path
+from typing import Optional, Union
-import dotenv
+from pydantic import BaseSettings, Field, validator
-APP_VERSION = "v0.4.0"
+APP_VERSION = "v0.4.1"
DB_VERSION = "v0.4.0"
CWD = Path(__file__).parent
BASE_DIR = CWD.parent.parent
ENV = BASE_DIR.joinpath(".env")
-dotenv.load_dotenv(ENV)
-PRODUCTION = os.environ.get("ENV")
+PRODUCTION = os.getenv("ENV", "False").lower() in ["true", "1"]
def determine_data_dir(production: bool) -> Path:
@@ -38,21 +38,26 @@ def determine_secrets(data_dir: Path, production: bool) -> str:
return new_secret
+# General
+DATA_DIR = determine_data_dir(PRODUCTION)
+LOGGER_FILE = DATA_DIR.joinpath("mealie.log")
+
+
class AppDirectories:
def __init__(self, cwd, data_dir) -> None:
- self.DATA_DIR = data_dir
- self.WEB_PATH = cwd.joinpath("dist")
- self.IMG_DIR = data_dir.joinpath("img")
- self.BACKUP_DIR = data_dir.joinpath("backups")
- self.DEBUG_DIR = data_dir.joinpath("debug")
- self.MIGRATION_DIR = data_dir.joinpath("migration")
- self.NEXTCLOUD_DIR = self.MIGRATION_DIR.joinpath("nextcloud")
- self.CHOWDOWN_DIR = self.MIGRATION_DIR.joinpath("chowdown")
- self.TEMPLATE_DIR = data_dir.joinpath("templates")
- self.USER_DIR = data_dir.joinpath("users")
- self.SQLITE_DIR = data_dir.joinpath("db")
- self.RECIPE_DATA_DIR = data_dir.joinpath("recipes")
- self.TEMP_DIR = data_dir.joinpath(".temp")
+ self.DATA_DIR: Path = data_dir
+ self.WEB_PATH: Path = cwd.joinpath("dist")
+ self.IMG_DIR: Path = data_dir.joinpath("img")
+ self.BACKUP_DIR: Path = data_dir.joinpath("backups")
+ self.DEBUG_DIR: Path = data_dir.joinpath("debug")
+ self.MIGRATION_DIR: Path = data_dir.joinpath("migration")
+ self.NEXTCLOUD_DIR: Path = self.MIGRATION_DIR.joinpath("nextcloud")
+ self.CHOWDOWN_DIR: Path = self.MIGRATION_DIR.joinpath("chowdown")
+ self.TEMPLATE_DIR: Path = data_dir.joinpath("templates")
+ self.USER_DIR: Path = data_dir.joinpath("users")
+ self.SQLITE_DIR: Path = data_dir.joinpath("db")
+ self.RECIPE_DATA_DIR: Path = data_dir.joinpath("recipes")
+ self.TEMP_DIR: Path = data_dir.joinpath(".temp")
self.ensure_directories()
@@ -74,36 +79,51 @@ class AppDirectories:
dir.mkdir(parents=True, exist_ok=True)
-class AppSettings:
- def __init__(self, app_dirs: AppDirectories) -> None:
- global DB_VERSION
- self.PRODUCTION = bool(os.environ.get("ENV"))
- self.IS_DEMO = os.getenv("DEMO", "False") == "True"
- self.API_PORT = int(os.getenv("API_PORT", 9000))
- self.API = os.getenv("API_DOCS", "True") == "True"
- self.DOCS_URL = "/docs" if self.API else None
- self.REDOC_URL = "/redoc" if self.API else None
- self.SECRET = determine_secrets(app_dirs.DATA_DIR, self.PRODUCTION)
- self.DATABASE_TYPE = os.getenv("DB_TYPE", "sqlite")
-
- # Used to Set SQLite File Version
- self.SQLITE_FILE = None
- if self.DATABASE_TYPE == "sqlite":
- self.SQLITE_FILE = app_dirs.SQLITE_DIR.joinpath(f"mealie_{DB_VERSION}.sqlite")
- else:
- raise Exception("Unable to determine database type. Acceptible options are 'sqlite'")
-
- self.DEFAULT_GROUP = os.getenv("DEFAULT_GROUP", "Home")
- self.DEFAULT_PASSWORD = os.getenv("DEFAULT_PASSWORD", "MyPassword")
-
- # Not Used!
- self.SFTP_USERNAME = os.getenv("SFTP_USERNAME", None)
- self.SFTP_PASSWORD = os.getenv("SFTP_PASSWORD", None)
-
-
-# General
-DATA_DIR = determine_data_dir(PRODUCTION)
-LOGGER_FILE = DATA_DIR.joinpath("mealie.log")
-
app_dirs = AppDirectories(CWD, DATA_DIR)
-settings = AppSettings(app_dirs)
+
+
+class AppSettings(BaseSettings):
+ global DATA_DIR
+ PRODUCTION: bool = Field(False, env="ENV")
+ IS_DEMO: bool = False
+ API_PORT: int = 9000
+ API_DOCS: bool = True
+
+ @property
+ def DOCS_URL(self) -> str:
+ return "/docs" if self.API_DOCS else None
+
+ @property
+ def REDOC_URL(self) -> str:
+ return "/redoc" if self.API_DOCS else None
+
+ SECRET: str = determine_secrets(DATA_DIR, PRODUCTION)
+ DATABASE_TYPE: str = Field("sqlite", env="DB_TYPE")
+
+ @validator("DATABASE_TYPE", pre=True)
+ def validate_db_type(cls, v: str) -> Optional[str]:
+ if v != "sqlite":
+ raise ValueError("Unable to determine database type. Acceptable options are 'sqlite'")
+ else:
+ return v
+
+ # Used to Set SQLite File Version
+ SQLITE_FILE: Optional[Union[str, Path]]
+
+ @validator("SQLITE_FILE", pre=True)
+ def identify_sqlite_file(cls, v: str) -> Optional[str]:
+ return app_dirs.SQLITE_DIR.joinpath(f"mealie_{DB_VERSION}.sqlite")
+
+ DEFAULT_GROUP: str = "Home"
+ DEFAULT_PASSWORD: str = "MyPassword"
+
+ # Not Used!
+ SFTP_USERNAME: Optional[str]
+ SFTP_PASSWORD: Optional[str]
+
+ class Config:
+ env_file = BASE_DIR.joinpath(".env")
+ env_file_encoding = "utf-8"
+
+
+settings = AppSettings()
diff --git a/mealie/db/database.py b/mealie/db/database.py
index b2543579852b..69e3830d1eb9 100644
--- a/mealie/db/database.py
+++ b/mealie/db/database.py
@@ -9,7 +9,8 @@ from mealie.db.models.users import User
from mealie.schema.category import RecipeCategoryResponse, RecipeTagResponse
from mealie.schema.meal import MealPlanInDB
from mealie.schema.recipe import Recipe
-from mealie.schema.settings import CustomPageOut, SiteSettings as SiteSettingsSchema
+from mealie.schema.settings import CustomPageOut
+from mealie.schema.settings import SiteSettings as SiteSettingsSchema
from mealie.schema.sign_up import SignUpOut
from mealie.schema.theme import SiteTheme
from mealie.schema.user import GroupInDB, UserInDB
diff --git a/mealie/db/db_base.py b/mealie/db/db_base.py
index 6aef2b24d8fc..f9e9a03a9b86 100644
--- a/mealie/db/db_base.py
+++ b/mealie/db/db_base.py
@@ -15,17 +15,10 @@ class BaseDocument:
self.schema: BaseModel
# TODO: Improve Get All Query Functionality
- def get_all(self, session: Session, limit: int = None, order_by: str = None) -> List[dict]:
+ def get_all(self, session: Session, limit: int = None, order_by: str = None, override_schema=None) -> List[dict]:
+ eff_schema = override_schema or self.schema
- if self.orm_mode:
- return [self.schema.from_orm(x) for x in session.query(self.sql_model).limit(limit).all()]
-
- # list = [x.dict() for x in session.query(self.sql_model).limit(limit).all()]
-
- # if limit == 1:
- # return list[0]
-
- # return list
+ return [eff_schema.from_orm(x) for x in session.query(self.sql_model).limit(limit).all()]
def get_all_limit_columns(self, session: Session, fields: List[str], limit: int = None) -> List[SqlAlchemyBase]:
"""Queries the database for the selected model. Restricts return responses to the
diff --git a/mealie/db/init_db.py b/mealie/db/init_db.py
index 11c63bd3b65b..2dacbf438c4a 100644
--- a/mealie/db/init_db.py
+++ b/mealie/db/init_db.py
@@ -47,11 +47,12 @@ def default_user_init(session: Session):
logger.info("Generating Default User")
db.users.create(session, default_user)
-
-if __name__ == "__main__":
+def main():
if sql_exists:
print("Database Exists")
- exit()
else:
print("Database Doesn't Exists, Initializing...")
init_db()
+
+if __name__ == "__main__":
+ main()
\ No newline at end of file
diff --git a/mealie/db/models/recipe/recipe.py b/mealie/db/models/recipe/recipe.py
index e83134f99fee..28e512db0c9b 100644
--- a/mealie/db/models/recipe/recipe.py
+++ b/mealie/db/models/recipe/recipe.py
@@ -60,7 +60,7 @@ class RecipeModel(SqlAlchemyBase, BaseMixins):
@validates("name")
def validate_name(self, key, name):
- assert not name == ""
+ assert name != ""
return name
def __init__(
@@ -92,11 +92,7 @@ class RecipeModel(SqlAlchemyBase, BaseMixins):
self.image = image
self.recipeCuisine = recipeCuisine
- if self.nutrition:
- self.nutrition = Nutrition(**nutrition)
- else:
- self.nutrition = Nutrition()
-
+ self.nutrition = Nutrition(**nutrition) if self.nutrition else Nutrition()
self.tools = [Tool(tool=x) for x in tools] if tools else []
self.recipeYield = recipeYield
diff --git a/mealie/db/models/recipe/tag.py b/mealie/db/models/recipe/tag.py
index 1977055b9821..8c7e5ad46864 100644
--- a/mealie/db/models/recipe/tag.py
+++ b/mealie/db/models/recipe/tag.py
@@ -1,7 +1,7 @@
import sqlalchemy as sa
import sqlalchemy.orm as orm
-from mealie.db.models.model_base import SqlAlchemyBase
from fastapi.logger import logger
+from mealie.db.models.model_base import SqlAlchemyBase
from slugify import slugify
from sqlalchemy.orm import validates
@@ -25,7 +25,7 @@ class Tag(SqlAlchemyBase):
assert name != ""
return name
- def __init__(self, name) -> None:
+ def __init__(self, name, session=None) -> None:
self.name = name.strip()
self.slug = slugify(self.name)
diff --git a/mealie/routes/debug_routes.py b/mealie/routes/debug_routes.py
index 1a0332b7aaad..933e781c827c 100644
--- a/mealie/routes/debug_routes.py
+++ b/mealie/routes/debug_routes.py
@@ -3,20 +3,35 @@ import json
from fastapi import APIRouter, Depends
from mealie.core.config import APP_VERSION, LOGGER_FILE, app_dirs, settings
from mealie.routes.deps import get_current_user
+from mealie.schema.debug import AppInfo, DebugInfo
router = APIRouter(prefix="/api/debug", tags=["Debug"])
+@router.get("")
+async def get_debug_info(current_user=Depends(get_current_user)):
+ """ Returns general information about the application for debugging """
+
+ return DebugInfo(
+ production=settings.PRODUCTION,
+ version=APP_VERSION,
+ demo_status=settings.IS_DEMO,
+ api_port=settings.API_PORT,
+ api_docs=settings.API_DOCS,
+ db_type=settings.DATABASE_TYPE,
+ sqlite_file=settings.SQLITE_FILE,
+ default_group=settings.DEFAULT_GROUP,
+ )
+
+
@router.get("/version")
-async def get_mealie_version(current_user=Depends(get_current_user)):
+async def get_mealie_version():
""" Returns the current version of mealie"""
- return {"version": APP_VERSION}
-
-
-@router.get("/is-demo")
-async def get_demo_status():
- print(settings.IS_DEMO)
- return {"demoStatus": settings.IS_DEMO}
+ return AppInfo(
+ version=APP_VERSION,
+ demo_status=settings.IS_DEMO,
+ production=settings.PRODUCTION,
+ )
@router.get("/last-recipe-json")
diff --git a/mealie/routes/mealplans/crud.py b/mealie/routes/mealplans/crud.py
index c0498e6a131e..f1dcc02bb220 100644
--- a/mealie/routes/mealplans/crud.py
+++ b/mealie/routes/mealplans/crud.py
@@ -1,12 +1,14 @@
-from fastapi import APIRouter, Depends
+from fastapi import APIRouter, Depends, HTTPException
from mealie.db.database import db
from mealie.db.db_setup import generate_session
from mealie.routes.deps import get_current_user
from mealie.schema.meal import MealPlanIn, MealPlanInDB
from mealie.schema.snackbar import SnackResponse
from mealie.schema.user import GroupInDB, UserInDB
+from mealie.services.image import image
from mealie.services.meal_services import get_todays_meal, process_meals
from sqlalchemy.orm.session import Session
+from starlette.responses import FileResponse
router = APIRouter(prefix="/api/meal-plans", tags=["Meal Plan"])
@@ -74,3 +76,22 @@ def get_today(session: Session = Depends(generate_session), current_user: UserIn
recipe = get_todays_meal(session, group_in_db)
return recipe.slug
+
+
+@router.get("/today/image", tags=["Meal Plan"])
+def get_today_image(session: Session = Depends(generate_session), group_name: str = "Home"):
+ """
+ Returns the image for today's meal-plan.
+ """
+
+ group_in_db: GroupInDB = db.groups.get(session, group_name, "name")
+ recipe = get_todays_meal(session, group_in_db)
+
+ if recipe:
+ recipe_image = image.read_image(recipe.slug, image_type=image.IMG_OPTIONS.ORIGINAL_IMAGE)
+ else:
+ raise HTTPException(404, "no meal for today")
+ if recipe_image:
+ return FileResponse(recipe_image)
+ else:
+ raise HTTPException(404, "file not found")
diff --git a/mealie/routes/recipe/all_recipe_routes.py b/mealie/routes/recipe/all_recipe_routes.py
index 6599054836a3..42dd6940bd4e 100644
--- a/mealie/routes/recipe/all_recipe_routes.py
+++ b/mealie/routes/recipe/all_recipe_routes.py
@@ -3,13 +3,24 @@ from typing import List, Optional
from fastapi import APIRouter, Depends, Query
from mealie.db.database import db
from mealie.db.db_setup import generate_session
-from mealie.schema.recipe import AllRecipeRequest
+from mealie.schema.recipe import AllRecipeRequest, RecipeSummary
from slugify import slugify
from sqlalchemy.orm.session import Session
router = APIRouter(tags=["Query All Recipes"])
+@router.get("/api/recipes/summary")
+async def get_recipe_summary(
+ skip=0,
+ end=9999,
+ session: Session = Depends(generate_session),
+):
+ """ Returns the summary data for recipes in the database """
+
+ return db.recipes.get_all(session, limit=end, override_schema=RecipeSummary)
+
+
@router.get("/api/recipes")
def get_all_recipes(
keys: Optional[List[str]] = Query(...),
diff --git a/mealie/routes/recipe/recipe_crud_routes.py b/mealie/routes/recipe/recipe_crud_routes.py
index e391b95baac3..b02cec248546 100644
--- a/mealie/routes/recipe/recipe_crud_routes.py
+++ b/mealie/routes/recipe/recipe_crud_routes.py
@@ -1,3 +1,5 @@
+from enum import Enum
+
from fastapi import APIRouter, Depends, File, Form, HTTPException
from fastapi.responses import FileResponse
from mealie.db.database import db
@@ -5,7 +7,7 @@ from mealie.db.db_setup import generate_session
from mealie.routes.deps import get_current_user
from mealie.schema.recipe import Recipe, RecipeURLIn
from mealie.schema.snackbar import SnackResponse
-from mealie.services.image_services import read_image, write_image
+from mealie.services.image.image import IMG_OPTIONS, delete_image, read_image, rename_image, write_image
from mealie.services.scraper.scraper import create_from_url
from sqlalchemy.orm.session import Session
@@ -59,6 +61,9 @@ def update_recipe(
recipe: Recipe = db.recipes.update(session, recipe_slug, data.dict())
+ if recipe_slug != recipe.slug:
+ rename_image(original_slug=recipe_slug, new_slug=recipe.slug)
+
return recipe.slug
@@ -72,20 +77,34 @@ def delete_recipe(
try:
db.recipes.delete(session, recipe_slug)
+ delete_image(recipe_slug)
except:
raise HTTPException(status_code=404, detail=SnackResponse.error("Unable to Delete Recipe"))
return SnackResponse.error(f"Recipe {recipe_slug} Deleted")
+class ImageType(str, Enum):
+ original = "original"
+ small = "small"
+ tiny = "tiny"
+
+
@router.get("/{recipe_slug}/image")
-async def get_recipe_img(recipe_slug: str):
+async def get_recipe_img(recipe_slug: str, image_type: ImageType = ImageType.original):
""" Takes in a recipe slug, returns the static image """
- recipe_image = read_image(recipe_slug)
+ if image_type == ImageType.original:
+ which_image = IMG_OPTIONS.ORIGINAL_IMAGE
+ elif image_type == ImageType.small:
+ which_image = IMG_OPTIONS.MINIFIED_IMAGE
+ elif image_type == ImageType.tiny:
+ which_image = IMG_OPTIONS.TINY_IMAGE
+
+ recipe_image = read_image(recipe_slug, image_type=which_image)
if recipe_image:
return FileResponse(recipe_image)
else:
- return
+ raise HTTPException(404, "file not found")
@router.put("/{recipe_slug}/image")
diff --git a/mealie/routes/recipe/tag_routes.py b/mealie/routes/recipe/tag_routes.py
index 4fa542ac7947..65dfbdc1916e 100644
--- a/mealie/routes/recipe/tag_routes.py
+++ b/mealie/routes/recipe/tag_routes.py
@@ -2,6 +2,7 @@ from fastapi import APIRouter, Depends
from mealie.db.database import db
from mealie.db.db_setup import generate_session
from mealie.routes.deps import get_current_user
+from mealie.schema.category import RecipeTagResponse, TagIn
from mealie.schema.snackbar import SnackResponse
from sqlalchemy.orm.session import Session
@@ -18,8 +19,16 @@ async def get_all_recipe_tags(session: Session = Depends(generate_session)):
""" Returns a list of available tags in the database """
return db.tags.get_all_limit_columns(session, ["slug", "name"])
+@router.post("")
+async def create_recipe_tag(
+ tag: TagIn, session: Session = Depends(generate_session), current_user=Depends(get_current_user)
+):
+ """ Creates a Tag in the database """
-@router.get("/{tag}")
+ return db.tags.create(session, tag.dict())
+
+
+@router.get("/{tag}", response_model=RecipeTagResponse)
def get_all_recipes_by_tag(tag: str, session: Session = Depends(generate_session)):
""" Returns a list of recipes associated with the provided tag. """
return db.tags.get(session, tag)
diff --git a/mealie/routes/users/crud.py b/mealie/routes/users/crud.py
index f15d5dd8caf9..774d867c3a9a 100644
--- a/mealie/routes/users/crud.py
+++ b/mealie/routes/users/crud.py
@@ -4,7 +4,7 @@ from datetime import timedelta
from fastapi import APIRouter, Depends, File, UploadFile
from fastapi.responses import FileResponse
from mealie.core import security
-from mealie.core.config import settings, app_dirs
+from mealie.core.config import app_dirs, settings
from mealie.core.security import get_password_hash, verify_password
from mealie.db.database import db
from mealie.db.db_setup import generate_session
diff --git a/mealie/run.sh b/mealie/run.sh
index 5e51291a0db9..12a5d142d674 100755
--- a/mealie/run.sh
+++ b/mealie/run.sh
@@ -1,13 +1,32 @@
#!/bin/sh
-# Initialize Database Prerun
-python mealie/db/init_db.py
+# Get Reload Arg `run.sh reload` for dev server
+ARG1=${1:-production}
-## Migrations
+# Set Script Directory - Used for running the script from a different directory.
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
+
+# Initialize Database Prerun
+poetry run python $DIR/db/init_db.py
+poetry run python $DIR/services/image/minify.py
+
+# Migrations
# TODO
+ # Set Port from ENV Variable
-## Web Server
-caddy start --config ./Caddyfile
+if [[ "$ARG1" = "reload" ]]
+then
+ echo "Hot Reload!"
+
+ # Start API
+ uvicorn mealie.app:app --host 0.0.0.0 --port 9000 --reload
+else
+ echo "Production"
+ # Web Server
+ caddy start --config ./Caddyfile
+
+ # Start API
+ uvicorn mealie.app:app --host 0.0.0.0 --port 9000
+fi
-# Start API
-uvicorn mealie.app:app --host 0.0.0.0 --port 9000
\ No newline at end of file
diff --git a/mealie/schema/category.py b/mealie/schema/category.py
index 3e8635152545..5e7f1c842a05 100644
--- a/mealie/schema/category.py
+++ b/mealie/schema/category.py
@@ -23,9 +23,13 @@ class RecipeCategoryResponse(CategoryBase):
schema_extra = {"example": {"id": 1, "name": "dinner", "recipes": [{}]}}
+class TagIn(CategoryIn):
+ pass
+
+
class TagBase(CategoryBase):
pass
-class RecipeTagResponse(TagBase):
+class RecipeTagResponse(RecipeCategoryResponse):
pass
diff --git a/mealie/schema/debug.py b/mealie/schema/debug.py
new file mode 100644
index 000000000000..4cb94396da37
--- /dev/null
+++ b/mealie/schema/debug.py
@@ -0,0 +1,15 @@
+from pathlib import Path
+from fastapi_camelcase import CamelModel
+
+
+class AppInfo(CamelModel):
+ production: bool
+ version: str
+ demo_status: bool
+
+class DebugInfo(AppInfo):
+ api_port: int
+ api_docs: bool
+ db_type: str
+ sqlite_file: Path
+ default_group: str
\ No newline at end of file
diff --git a/mealie/schema/recipe.py b/mealie/schema/recipe.py
index be1df086e8e1..3ea0b9aee2a8 100644
--- a/mealie/schema/recipe.py
+++ b/mealie/schema/recipe.py
@@ -34,12 +34,30 @@ class Nutrition(BaseModel):
orm_mode = True
-class Recipe(BaseModel):
+class RecipeSummary(BaseModel):
name: str
- description: Optional[str]
+ slug: Optional[str] = ""
image: Optional[Any]
- recipeYield: Optional[str]
+
+ description: Optional[str]
recipeCategory: Optional[List[str]] = []
+ tags: Optional[List[str]] = []
+ rating: Optional[int]
+
+ class Config:
+ orm_mode = True
+
+ @classmethod
+ def getter_dict(_cls, name_orm: RecipeModel):
+ return {
+ **GetterDict(name_orm),
+ "recipeCategory": [x.name for x in name_orm.recipeCategory],
+ "tags": [x.name for x in name_orm.tags],
+ }
+
+
+class Recipe(RecipeSummary):
+ recipeYield: Optional[str]
recipeIngredient: Optional[list[str]]
recipeInstructions: Optional[list[RecipeStep]]
nutrition: Optional[Nutrition]
@@ -50,11 +68,8 @@ class Recipe(BaseModel):
performTime: Optional[str] = None
# Mealie Specific
- slug: Optional[str] = ""
- tags: Optional[List[str]] = []
dateAdded: Optional[datetime.date]
notes: Optional[List[RecipeNote]] = []
- rating: Optional[int]
orgURL: Optional[str]
extras: Optional[dict] = {}
diff --git a/mealie/services/backups/exports.py b/mealie/services/backups/exports.py
index 52dffa018b44..86220ae8585c 100644
--- a/mealie/services/backups/exports.py
+++ b/mealie/services/backups/exports.py
@@ -65,8 +65,7 @@ class ExportDatabase:
f.write(content)
def export_images(self):
- for file in app_dirs.IMG_DIR.iterdir():
- shutil.copy(file, self.img_dir.joinpath(file.name))
+ shutil.copytree(app_dirs.IMG_DIR, self.img_dir, dirs_exist_ok=True)
def export_items(self, items: list[BaseModel], folder_name: str, export_list=True):
items = [x.dict() for x in items]
diff --git a/mealie/services/backups/imports.py b/mealie/services/backups/imports.py
index 77842f86012a..2b8bf33e5533 100644
--- a/mealie/services/backups/imports.py
+++ b/mealie/services/backups/imports.py
@@ -11,6 +11,7 @@ from mealie.schema.restore import CustomPageImport, GroupImport, RecipeImport, S
from mealie.schema.settings import CustomPageOut, SiteSettings
from mealie.schema.theme import SiteTheme
from mealie.schema.user import UpdateGroup, UserInDB
+from mealie.services.image import minify
from pydantic.main import BaseModel
from sqlalchemy.orm.session import Session
@@ -108,7 +109,13 @@ class ImportDatabase:
image_dir = self.import_dir.joinpath("images")
for image in image_dir.iterdir():
if image.stem in successful_imports:
- shutil.copy(image, app_dirs.IMG_DIR)
+ if image.is_dir():
+ dest = app_dirs.IMG_DIR.joinpath(image.stem)
+ shutil.copytree(image, dest, dirs_exist_ok=True)
+ if image.is_file():
+ shutil.copy(image, app_dirs.IMG_DIR)
+
+ minify.migrate_images()
def import_themes(self):
themes_file = self.import_dir.joinpath("themes", "themes.json")
diff --git a/dev/data/img/.gitkeep b/mealie/services/image/__init__.py
similarity index 100%
rename from dev/data/img/.gitkeep
rename to mealie/services/image/__init__.py
diff --git a/mealie/services/image/image.py b/mealie/services/image/image.py
new file mode 100644
index 000000000000..04e01a492456
--- /dev/null
+++ b/mealie/services/image/image.py
@@ -0,0 +1,112 @@
+import shutil
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Union
+
+import requests
+from fastapi.logger import logger
+from mealie.core.config import app_dirs
+from mealie.services.image import minify
+
+
+@dataclass
+class ImageOptions:
+ ORIGINAL_IMAGE: str = "original*"
+ MINIFIED_IMAGE: str = "min-original*"
+ TINY_IMAGE: str = "tiny-original*"
+
+
+IMG_OPTIONS = ImageOptions()
+
+
+def read_image(recipe_slug: str, image_type: str = "original") -> Path:
+ """returns the path to the image file for the recipe base of image_type
+
+ Args:
+ recipe_slug (str): Recipe Slug
+ image_type (str, optional): Glob Style Matcher "original*" | "min-original* | "tiny-original*"
+
+ Returns:
+ Path: [description]
+ """
+ recipe_slug = recipe_slug.split(".")[0]  # In case a file name was passed
+ recipe_image_dir = app_dirs.IMG_DIR.joinpath(recipe_slug)
+
+ for file in recipe_image_dir.glob(image_type):
+ return file
+
+ return None
+
+
+def rename_image(original_slug, new_slug) -> Path:
+ current_path = app_dirs.IMG_DIR.joinpath(original_slug)
+ new_path = app_dirs.IMG_DIR.joinpath(new_slug)
+
+ try:
+ new_path = current_path.rename(new_path)
+ except FileNotFoundError:
+ logger.error(f"Image Directory {original_slug} Doesn't Exist")
+
+ return new_path
+
+
+def write_image(recipe_slug: str, file_data: bytes, extension: str) -> Path:
+ try:
+ delete_image(recipe_slug)
+ except:
+ pass
+
+ image_dir = Path(app_dirs.IMG_DIR.joinpath(f"{recipe_slug}"))
+ image_dir.mkdir()
+ extension = extension.replace(".", "")
+ image_path = image_dir.joinpath(f"original.{extension}")
+
+ if isinstance(file_data, bytes):
+ with open(image_path, "ab") as f:
+ f.write(file_data)
+ else:
+ with open(image_path, "ab") as f:
+ shutil.copyfileobj(file_data, f)
+
+ minify.migrate_images()
+
+ return image_path
+
+
+def delete_image(recipe_slug: str) -> str:
+ recipe_slug = recipe_slug.split(".")[0]
+ for file in app_dirs.IMG_DIR.glob(f"{recipe_slug}*"):
+ return shutil.rmtree(file)
+
+
+def scrape_image(image_url: str, slug: str) -> Path:
+ if isinstance(image_url, str): # Handles String Types
+ image_url = image_url
+
+ if isinstance(image_url, list): # Handles List Types
+ image_url = image_url[0]
+
+ if isinstance(image_url, dict): # Handles Dictionary Types
+ for key in image_url:
+ if key == "url":
+ image_url = image_url.get("url")
+
+ filename = slug + "." + image_url.split(".")[-1]
+ filename = app_dirs.IMG_DIR.joinpath(filename)
+
+ try:
+ r = requests.get(image_url, stream=True)
+ except:
+ logger.exception("Fatal Image Request Exception")
+ return None
+
+ if r.status_code == 200:
+ r.raw.decode_content = True
+
+ write_image(slug, r.raw, filename.suffix)
+
+ filename.unlink()
+
+ return slug
+
+ return None
diff --git a/mealie/services/image/minify.py b/mealie/services/image/minify.py
new file mode 100644
index 000000000000..024656a50314
--- /dev/null
+++ b/mealie/services/image/minify.py
@@ -0,0 +1,113 @@
+import shutil
+from pathlib import Path
+
+from fastapi.logger import logger
+from mealie.core.config import app_dirs
+from mealie.db.database import db
+from mealie.db.db_setup import create_session
+from PIL import Image
+from sqlalchemy.orm.session import Session
+
+
+def minify_image(image_file: Path, min_dest: Path, tiny_dest: Path):
+ """Minifies an image in it's original file format. Quality is lost
+
+ Args:
+ my_path (Path): Source Files
+ min_dest (Path): FULL Destination File Path
+ tiny_dest (Path): FULL Destination File Path
+ """
+ try:
+ img = Image.open(image_file)
+ basewidth = 720
+ wpercent = basewidth / float(img.size[0])
+ hsize = int((float(img.size[1]) * float(wpercent)))
+ img = img.resize((basewidth, hsize), Image.ANTIALIAS)
+ img.save(min_dest, quality=70)
+
+ tiny_image = crop_center(img)
+ tiny_image.save(tiny_dest, quality=70)
+
+ except Exception:
+ shutil.copy(image_file, min_dest)
+ shutil.copy(image_file, tiny_dest)
+
+
+def crop_center(pil_img, crop_width=300, crop_height=300):
+ img_width, img_height = pil_img.size
+ return pil_img.crop(
+ (
+ (img_width - crop_width) // 2,
+ (img_height - crop_height) // 2,
+ (img_width + crop_width) // 2,
+ (img_height + crop_height) // 2,
+ )
+ )
+
+
+def sizeof_fmt(size, decimal_places=2):
+ for unit in ["B", "kB", "MB", "GB", "TB", "PB"]:
+ if size < 1024.0 or unit == "PB":
+ break
+ size /= 1024.0
+ return f"{size:.{decimal_places}f} {unit}"
+
+
+def move_all_images():
+ for image_file in app_dirs.IMG_DIR.iterdir():
+ if image_file.is_file():
+ if image_file.name == ".DS_Store":
+ continue
+ new_folder = app_dirs.IMG_DIR.joinpath(image_file.stem)
+ new_folder.mkdir(parents=True, exist_ok=True)
+ image_file.rename(new_folder.joinpath(f"original{image_file.suffix}"))
+
+
+def validate_slugs_in_database(session: Session = None):
+ def check_image_path(image_name: str, slug_path: str) -> bool:
+ existing_path: Path = app_dirs.IMG_DIR.joinpath(image_name)
+ slug_path: Path = app_dirs.IMG_DIR.joinpath(slug_path)
+
+ if existing_path.is_dir():
+ slug_path.rename(existing_path)
+ else:
+ logger.info("No Image Found")
+
+ session = session or create_session()
+ all_recipes = db.recipes.get_all(session)
+
+ slugs_and_images = [(x.slug, x.image) for x in all_recipes]
+
+ for slug, image in slugs_and_images:
+ image_slug = image.split(".")[0] # Remove Extension
+ if slug != image_slug:
+ logger.info(f"{slug}, Doesn't Match '{image_slug}'")
+ check_image_path(image, slug)
+
+
+def migrate_images():
+ print("Checking for Images to Minify...")
+
+ move_all_images()
+
+ # Minify Loop
+ for image in app_dirs.IMG_DIR.glob("*/original.*"):
+ min_dest = image.parent.joinpath(f"min-original{image.suffix}")
+ tiny_dest = image.parent.joinpath(f"tiny-original{image.suffix}")
+
+ if min_dest.exists() and tiny_dest.exists():
+ continue
+
+ minify_image(image, min_dest, tiny_dest)
+
+ org_size = sizeof_fmt(image.stat().st_size)
+ dest_size = sizeof_fmt(min_dest.stat().st_size)
+ tiny_size = sizeof_fmt(tiny_dest.stat().st_size)
+ logger.info(f"{image.name} Minified: {org_size} -> {dest_size} -> {tiny_size}")
+
+ logger.info("Finished Minification Check")
+
+
+if __name__ == "__main__":
+ migrate_images()
+ validate_slugs_in_database()
diff --git a/mealie/services/image_services.py b/mealie/services/image_services.py
deleted file mode 100644
index df793c40ef28..000000000000
--- a/mealie/services/image_services.py
+++ /dev/null
@@ -1,63 +0,0 @@
-import shutil
-from pathlib import Path
-
-import requests
-from fastapi.logger import logger
-from mealie.core.config import app_dirs
-
-
-def read_image(recipe_slug: str) -> Path:
- if app_dirs.IMG_DIR.joinpath(recipe_slug).is_file():
- return app_dirs.IMG_DIR.joinpath(recipe_slug)
-
- recipe_slug = recipe_slug.split(".")[0]
- for file in app_dirs.IMG_DIR.glob(f"{recipe_slug}*"):
- return file
-
-
-def write_image(recipe_slug: str, file_data: bytes, extension: str) -> Path.name:
- delete_image(recipe_slug)
-
- image_path = Path(app_dirs.IMG_DIR.joinpath(f"{recipe_slug}.{extension}"))
- with open(image_path, "ab") as f:
- f.write(file_data)
-
- return image_path
-
-
-def delete_image(recipe_slug: str) -> str:
- recipe_slug = recipe_slug.split(".")[0]
- for file in app_dirs.IMG_DIR.glob(f"{recipe_slug}*"):
- return file.unlink()
-
-
-def scrape_image(image_url: str, slug: str) -> Path:
- if isinstance(image_url, str): # Handles String Types
- image_url = image_url
-
- if isinstance(image_url, list): # Handles List Types
- image_url = image_url[0]
-
- if isinstance(image_url, dict): # Handles Dictionary Types
- for key in image_url:
- if key == "url":
- image_url = image_url.get("url")
-
- filename = slug + "." + image_url.split(".")[-1]
- filename = app_dirs.IMG_DIR.joinpath(filename)
-
- try:
- r = requests.get(image_url, stream=True)
- except:
- logger.exception("Fatal Image Request Exception")
- return None
-
- if r.status_code == 200:
- r.raw.decode_content = True
-
- with open(filename, "wb") as f:
- shutil.copyfileobj(r.raw, f)
-
- return filename
-
- return None
diff --git a/mealie/services/meal_services.py b/mealie/services/meal_services.py
index 46ff1fca0d6b..7f5aaefa5c2c 100644
--- a/mealie/services/meal_services.py
+++ b/mealie/services/meal_services.py
@@ -52,7 +52,7 @@ def get_todays_meal(session: Session, group: Union[int, GroupInDB]) -> Recipe:
Returns:
Recipe: Pydantic Recipe Object
"""
- session = session if session else create_session()
+ session = session or create_session()
if isinstance(group, int):
group: GroupInDB = db.groups.get(session, group)
diff --git a/mealie/services/migrations/chowdown.py b/mealie/services/migrations/chowdown.py
index 0f09b4fa685d..b1c0bb7de8d3 100644
--- a/mealie/services/migrations/chowdown.py
+++ b/mealie/services/migrations/chowdown.py
@@ -6,6 +6,7 @@ from fastapi.logger import logger
from mealie.core.config import app_dirs
from mealie.db.database import db
from mealie.schema.recipe import Recipe
+from mealie.services.image.minify import migrate_images
from mealie.utils.unzip import unpack_zip
from sqlalchemy.orm.session import Session
@@ -89,4 +90,5 @@ def chowdown_migrate(session: Session, zip_file: Path):
failed_images.append(image.name)
report = {"successful": successful_recipes, "failed": failed_recipes}
+ migrate_images()
return report
diff --git a/mealie/services/migrations/nextcloud.py b/mealie/services/migrations/nextcloud.py
index dd20821b03b1..08bf93966072 100644
--- a/mealie/services/migrations/nextcloud.py
+++ b/mealie/services/migrations/nextcloud.py
@@ -7,6 +7,7 @@ from pathlib import Path
from mealie.core.config import app_dirs
from mealie.db.database import db
from mealie.schema.recipe import Recipe
+from mealie.services.image import minify
from mealie.services.scraper.cleaner import Cleaner
@@ -23,39 +24,43 @@ def process_selection(selection: Path) -> Path:
return None
+def clean_nextcloud_tags(nextcloud_tags: str):
+ if not isinstance(nextcloud_tags, str):
+ return None
+
+ return [x.title().lstrip() for x in nextcloud_tags.split(",") if x != ""]
+
+
def import_recipes(recipe_dir: Path) -> Recipe:
image = False
+
for file in recipe_dir.glob("full.*"):
image = file
+ break
for file in recipe_dir.glob("*.json"):
recipe_file = file
+ break
with open(recipe_file, "r") as f:
recipe_dict = json.loads(f.read())
recipe_data = Cleaner.clean(recipe_dict)
- image_name = None
- if image:
- image_name = recipe_data["slug"] + image.suffix
- recipe_data["image"] = image_name
- else:
- recipe_data["image"] = "none"
+ image_name = recipe_data["slug"]
+ recipe_data["image"] = recipe_data["slug"]
+ recipe_data["tags"] = clean_nextcloud_tags(recipe_data.get("keywords"))
recipe = Recipe(**recipe_data)
if image:
- shutil.copy(image, app_dirs.IMG_DIR.joinpath(image_name))
+ shutil.copy(image, app_dirs.IMG_DIR.joinpath(image_name + image.suffix))
return recipe
def prep():
- try:
- shutil.rmtree(app_dirs.TEMP_DIR)
- except:
- pass
+ shutil.rmtree(app_dirs.TEMP_DIR, ignore_errors=True)
app_dirs.TEMP_DIR.mkdir(exist_ok=True, parents=True)
@@ -80,11 +85,13 @@ def migrate(session, selection: str):
db.recipes.create(session, recipe.dict())
successful_imports.append(recipe.name)
- except:
+ except Exception:
+ session.rollback()
logging.error(f"Failed Nextcloud Import: {dir.name}")
logging.exception("")
failed_imports.append(dir.name)
cleanup()
+ minify.migrate_images()
return {"successful": successful_imports, "failed": failed_imports}
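For clarity, the new `clean_nextcloud_tags` helper converts Nextcloud's comma-separated keyword string into a title-cased tag list, and returns `None` when the keywords field is missing or not a string. A self-contained illustration of the behaviour exactly as written above (the sample keyword string is made up):

```python
def clean_nextcloud_tags(nextcloud_tags: str):
    if not isinstance(nextcloud_tags, str):
        return None

    return [x.title().lstrip() for x in nextcloud_tags.split(",") if x != ""]


print(clean_nextcloud_tags("breakfast,quick meals, family favorites"))
# ['Breakfast', 'Quick Meals', 'Family Favorites']
print(clean_nextcloud_tags(None))
# None
```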
diff --git a/mealie/services/scraper/cleaner.py b/mealie/services/scraper/cleaner.py
index 9a0dba2a1b17..7028ad590e79 100644
--- a/mealie/services/scraper/cleaner.py
+++ b/mealie/services/scraper/cleaner.py
@@ -1,6 +1,6 @@
import html
import re
-from datetime import datetime
+from datetime import datetime, timedelta
from typing import List
from slugify import slugify
@@ -10,8 +10,6 @@ class Cleaner:
"""A Namespace for utility function to clean recipe data extracted
from a url and returns a dictionary that is ready for import into
the database. Cleaner.clean is the main entrypoint
-
-
"""
@staticmethod
@@ -44,7 +42,7 @@ class Cleaner:
@staticmethod
def category(category: str):
- if isinstance(category, str):
+ if isinstance(category, str) and category != "":
return [category]
else:
return []
@@ -144,7 +142,100 @@ class Cleaner:
def time(time_entry):
if time_entry is None:
return None
+ elif isinstance(time_entry, timedelta):
+            return pretty_print_timedelta(time_entry)
elif isinstance(time_entry, datetime):
print(time_entry)
+ elif isinstance(time_entry, str):
+ if re.match("PT.*H.*M", time_entry):
+ time_delta_object = parse_duration(time_entry)
+ return pretty_print_timedelta(time_delta_object)
else:
return str(time_entry)
+
+
+# ! TODO: Cleanup Code Below
+
+
+def parse_duration(iso_duration):
+ """Parses an ISO 8601 duration string into a datetime.timedelta instance.
+ Args:
+ iso_duration: an ISO 8601 duration string.
+ Returns:
+ a datetime.timedelta instance
+ """
+ m = re.match(r"^P(?:(\d+)Y)?(?:(\d+)M)?(?:(\d+)D)?T(?:(\d+)H)?(?:(\d+)M)?(?:(\d+(?:.\d+)?)S)?$", iso_duration)
+ if m is None:
+ raise ValueError("invalid ISO 8601 duration string")
+
+ days = 0
+ hours = 0
+ minutes = 0
+ seconds = 0.0
+
+ # Years and months are not being utilized here, as there is not enough
+ # information provided to determine which year and which month.
+    # Python's timedelta class stores durations as days, seconds and
+    # microseconds internally, and therefore we'd have to
+    # convert parsed years and months to a specific number of days.
+
+ if m[3]:
+ days = int(m[3])
+ if m[4]:
+ hours = int(m[4])
+ if m[5]:
+ minutes = int(m[5])
+ if m[6]:
+ seconds = float(m[6])
+
+ return timedelta(days=days, hours=hours, minutes=minutes, seconds=seconds)
+
+
+def pretty_print_timedelta(t, max_components=None, max_decimal_places=2):
+ """
+    Return a pretty, human-readable string for a timedelta.
+    For example datetime.timedelta(days=2, seconds=17280) is rendered as '2 days 4 Hours 48 Minutes'.
+    Setting max_components to e.g. 1 changes this to '2.2 days'; the number of decimal places can also be set.
+ """
+ time_scales = [
+ timedelta(days=365),
+ timedelta(days=1),
+ timedelta(hours=1),
+ timedelta(minutes=1),
+ timedelta(seconds=1),
+ timedelta(microseconds=1000),
+ timedelta(microseconds=1),
+ ]
+ time_scale_names_dict = {
+ timedelta(days=365): "year",
+ timedelta(days=1): "day",
+ timedelta(hours=1): "Hour",
+ timedelta(minutes=1): "Minute",
+ timedelta(seconds=1): "Second",
+ timedelta(microseconds=1000): "millisecond",
+ timedelta(microseconds=1): "microsecond",
+ }
+ count = 0
+ txt = ""
+ first = True
+ for scale in time_scales:
+ if t >= scale:
+ count += 1
+ n = t / scale if count == max_components else int(t / scale)
+ t -= n * scale
+
+ n_txt = str(round(n, max_decimal_places))
+ if n_txt[-2:] == ".0":
+ n_txt = n_txt[:-2]
+ txt += "{}{} {}{}".format(
+ "" if first else " ",
+ n_txt,
+ time_scale_names_dict[scale],
+ "s" if n > 1 else "",
+ )
+ if first:
+ first = False
+
+ if len(txt) == 0:
+ txt = "none"
+ return txt
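Tracing the new duration handling end to end: a schema.org ISO 8601 string such as "PT2H30M" is parsed into a `timedelta` by `parse_duration` and rendered by `pretty_print_timedelta`, which is exactly what the new `test_time_cleaner` unit test further down asserts. A short usage sketch, assuming the helpers are imported from `mealie.services.scraper.cleaner`:

```python
from mealie.services.scraper.cleaner import Cleaner, parse_duration, pretty_print_timedelta

delta = parse_duration("PT2H30M")      # timedelta(hours=2, minutes=30)
print(pretty_print_timedelta(delta))   # "2 Hours 30 Minutes"
print(Cleaner.time("PT2H30M"))         # same result via the "PT.*H.*M" branch
```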
diff --git a/mealie/services/scraper/scraper.py b/mealie/services/scraper/scraper.py
index 4ba9028e7f3d..4518270524b4 100644
--- a/mealie/services/scraper/scraper.py
+++ b/mealie/services/scraper/scraper.py
@@ -5,7 +5,7 @@ import requests
import scrape_schema_recipe
from mealie.core.config import app_dirs
from fastapi.logger import logger
-from mealie.services.image_services import scrape_image
+from mealie.services.image.image import scrape_image
from mealie.schema.recipe import Recipe
from mealie.services.scraper import open_graph
from mealie.services.scraper.cleaner import Cleaner
diff --git a/poetry.lock b/poetry.lock
index ca38853c4039..20b2ae91c440 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -50,7 +50,7 @@ zookeeper = ["kazoo"]
[[package]]
name = "astroid"
-version = "2.5.1"
+version = "2.5.2"
description = "An abstract syntax tree for Python with inference support."
category = "dev"
optional = false
@@ -191,11 +191,11 @@ toml = ["toml"]
[[package]]
name = "decorator"
-version = "4.4.2"
+version = "5.0.4"
description = "Decorators for Humans"
category = "main"
optional = false
-python-versions = ">=2.6, !=3.0.*, !=3.1.*"
+python-versions = ">=3.5"
[[package]]
name = "ecdsa"
@@ -412,11 +412,11 @@ python-versions = "*"
[[package]]
name = "lazy-object-proxy"
-version = "1.5.2"
+version = "1.6.0"
description = "A fast and thorough lazy object proxy."
category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
[[package]]
name = "livereload"
@@ -606,6 +606,14 @@ category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+[[package]]
+name = "pillow"
+version = "8.2.0"
+description = "Python Imaging Library (Fork)"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
[[package]]
name = "pluggy"
version = "0.13.1"
@@ -690,14 +698,14 @@ python-versions = "*"
[[package]]
name = "pylint"
-version = "2.7.2"
+version = "2.7.4"
description = "python code static checker"
category = "dev"
optional = false
python-versions = "~=3.6"
[package.dependencies]
-astroid = ">=2.5.1,<2.6"
+astroid = ">=2.5.2,<2.7"
colorama = {version = "*", markers = "sys_platform == \"win32\""}
isort = ">=4.2.5,<6"
mccabe = ">=0.6,<0.7"
@@ -943,7 +951,7 @@ python-versions = ">=3.6"
[[package]]
name = "sqlalchemy"
-version = "1.4.2"
+version = "1.4.5"
description = "Database Abstraction Library"
category = "main"
optional = false
@@ -954,6 +962,7 @@ greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\""}
[package.extras]
aiomysql = ["greenlet (!=0.4.17)", "aiomysql"]
+aiosqlite = ["greenlet (!=0.4.17)", "aiosqlite"]
asyncio = ["greenlet (!=0.4.17)"]
mariadb_connector = ["mariadb (>=1.0.1)"]
mssql = ["pyodbc"]
@@ -969,6 +978,7 @@ postgresql_pg8000 = ["pg8000 (>=1.16.6)"]
postgresql_psycopg2binary = ["psycopg2-binary"]
postgresql_psycopg2cffi = ["psycopg2cffi"]
pymysql = ["pymysql (<1)", "pymysql"]
+sqlcipher = ["sqlcipher3-binary"]
[[package]]
name = "starlette"
@@ -1154,7 +1164,7 @@ python-versions = "*"
[metadata]
lock-version = "1.1"
python-versions = "^3.9"
-content-hash = "a6c10e179bc15efc30627c9793218bb944f43dce5e624a7bcabcc47545e661e8"
+content-hash = "a81463b941cfdbc0e32e215644b172ec1111d5ada27864292d299d7d64fae4cf"
[metadata.files]
aiofiles = [
@@ -1174,8 +1184,8 @@ apscheduler = [
{file = "APScheduler-3.7.0.tar.gz", hash = "sha256:1cab7f2521e107d07127b042155b632b7a1cd5e02c34be5a28ff62f77c900c6a"},
]
astroid = [
- {file = "astroid-2.5.1-py3-none-any.whl", hash = "sha256:21d735aab248253531bb0f1e1e6d068f0ee23533e18ae8a6171ff892b98297cf"},
- {file = "astroid-2.5.1.tar.gz", hash = "sha256:cfc35498ee64017be059ceffab0a25bedf7548ab76f2bea691c5565896e7128d"},
+ {file = "astroid-2.5.2-py3-none-any.whl", hash = "sha256:cd80bf957c49765dce6d92c43163ff9d2abc43132ce64d4b1b47717c6d2522df"},
+ {file = "astroid-2.5.2.tar.gz", hash = "sha256:6b0ed1af831570e500e2437625979eaa3b36011f66ddfc4ce930128610258ca9"},
]
atomicwrites = [
{file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
@@ -1312,8 +1322,8 @@ coverage = [
{file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"},
]
decorator = [
- {file = "decorator-4.4.2-py2.py3-none-any.whl", hash = "sha256:41fa54c2a0cc4ba648be4fd43cff00aedf5b9465c9bf18d64325bc225f08f760"},
- {file = "decorator-4.4.2.tar.gz", hash = "sha256:e3a62f0520172440ca0dcc823749319382e377f37f140a0b99ef45fecb84bfe7"},
+ {file = "decorator-5.0.4-py3-none-any.whl", hash = "sha256:7280eff5351d7004144b1f302347328c3d06e84271dbe690a5dc4b17eb586994"},
+ {file = "decorator-5.0.4.tar.gz", hash = "sha256:cdd9d86d8aca11e4496f3cd26d48020db5a2fac247af0e918b3e0bbdb6e4a174"},
]
ecdsa = [
{file = "ecdsa-0.14.1-py2.py3-none-any.whl", hash = "sha256:e108a5fe92c67639abae3260e43561af914e7fd0d27bae6d2ec1312ae7934dfe"},
@@ -1436,30 +1446,28 @@ jstyleson = [
{file = "jstyleson-0.0.2.tar.gz", hash = "sha256:680003f3b15a2959e4e6a351f3b858e3c07dd3e073a0d54954e34d8ea5e1308e"},
]
lazy-object-proxy = [
- {file = "lazy-object-proxy-1.5.2.tar.gz", hash = "sha256:5944a9b95e97de1980c65f03b79b356f30a43de48682b8bdd90aa5089f0ec1f4"},
- {file = "lazy_object_proxy-1.5.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:e960e8be509e8d6d618300a6c189555c24efde63e85acaf0b14b2cd1ac743315"},
- {file = "lazy_object_proxy-1.5.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:522b7c94b524389f4a4094c4bf04c2b02228454ddd17c1a9b2801fac1d754871"},
- {file = "lazy_object_proxy-1.5.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:3782931963dc89e0e9a0ae4348b44762e868ea280e4f8c233b537852a8996ab9"},
- {file = "lazy_object_proxy-1.5.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:429c4d1862f3fc37cd56304d880f2eae5bd0da83bdef889f3bd66458aac49128"},
- {file = "lazy_object_proxy-1.5.2-cp35-cp35m-win32.whl", hash = "sha256:cd1bdace1a8762534e9a36c073cd54e97d517a17d69a17985961265be6d22847"},
- {file = "lazy_object_proxy-1.5.2-cp35-cp35m-win_amd64.whl", hash = "sha256:ddbdcd10eb999d7ab292677f588b658372aadb9a52790f82484a37127a390108"},
- {file = "lazy_object_proxy-1.5.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ecb5dd5990cec6e7f5c9c1124a37cb2c710c6d69b0c1a5c4aa4b35eba0ada068"},
- {file = "lazy_object_proxy-1.5.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:b6577f15d5516d7d209c1a8cde23062c0f10625f19e8dc9fb59268859778d7d7"},
- {file = "lazy_object_proxy-1.5.2-cp36-cp36m-win32.whl", hash = "sha256:c8fe2d6ff0ff583784039d0255ea7da076efd08507f2be6f68583b0da32e3afb"},
- {file = "lazy_object_proxy-1.5.2-cp36-cp36m-win_amd64.whl", hash = "sha256:fa5b2dee0e231fa4ad117be114251bdfe6afe39213bd629d43deb117b6a6c40a"},
- {file = "lazy_object_proxy-1.5.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1d33d6f789697f401b75ce08e73b1de567b947740f768376631079290118ad39"},
- {file = "lazy_object_proxy-1.5.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:57fb5c5504ddd45ed420b5b6461a78f58cbb0c1b0cbd9cd5a43ad30a4a3ee4d0"},
- {file = "lazy_object_proxy-1.5.2-cp37-cp37m-win32.whl", hash = "sha256:e7273c64bccfd9310e9601b8f4511d84730239516bada26a0c9846c9697617ef"},
- {file = "lazy_object_proxy-1.5.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6f4e5e68b7af950ed7fdb594b3f19a0014a3ace0fedb86acb896e140ffb24302"},
- {file = "lazy_object_proxy-1.5.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cadfa2c2cf54d35d13dc8d231253b7985b97d629ab9ca6e7d672c35539d38163"},
- {file = "lazy_object_proxy-1.5.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e7428977763150b4cf83255625a80a23dfdc94d43be7791ce90799d446b4e26f"},
- {file = "lazy_object_proxy-1.5.2-cp38-cp38-win32.whl", hash = "sha256:2f2de8f8ac0be3e40d17730e0600619d35c78c13a099ea91ef7fb4ad944ce694"},
- {file = "lazy_object_proxy-1.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:38c3865bd220bd983fcaa9aa11462619e84a71233bafd9c880f7b1cb753ca7fa"},
- {file = "lazy_object_proxy-1.5.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:8a44e9901c0555f95ac401377032f6e6af66d8fc1fbfad77a7a8b1a826e0b93c"},
- {file = "lazy_object_proxy-1.5.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:fa7fb7973c622b9e725bee1db569d2c2ee64d2f9a089201c5e8185d482c7352d"},
- {file = "lazy_object_proxy-1.5.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:71a1ef23f22fa8437974b2d60fedb947c99a957ad625f83f43fd3de70f77f458"},
- {file = "lazy_object_proxy-1.5.2-cp39-cp39-win32.whl", hash = "sha256:ef3f5e288aa57b73b034ce9c1f1ac753d968f9069cd0742d1d69c698a0167166"},
- {file = "lazy_object_proxy-1.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:37d9c34b96cca6787fe014aeb651217944a967a5b165e2cacb6b858d2997ab84"},
+ {file = "lazy-object-proxy-1.6.0.tar.gz", hash = "sha256:489000d368377571c6f982fba6497f2aa13c6d1facc40660963da62f5c379726"},
+ {file = "lazy_object_proxy-1.6.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:c6938967f8528b3668622a9ed3b31d145fab161a32f5891ea7b84f6b790be05b"},
+ {file = "lazy_object_proxy-1.6.0-cp27-cp27m-win32.whl", hash = "sha256:ebfd274dcd5133e0afae738e6d9da4323c3eb021b3e13052d8cbd0e457b1256e"},
+ {file = "lazy_object_proxy-1.6.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ed361bb83436f117f9917d282a456f9e5009ea12fd6de8742d1a4752c3017e93"},
+ {file = "lazy_object_proxy-1.6.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d900d949b707778696fdf01036f58c9876a0d8bfe116e8d220cfd4b15f14e741"},
+ {file = "lazy_object_proxy-1.6.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5743a5ab42ae40caa8421b320ebf3a998f89c85cdc8376d6b2e00bd12bd1b587"},
+ {file = "lazy_object_proxy-1.6.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:bf34e368e8dd976423396555078def5cfc3039ebc6fc06d1ae2c5a65eebbcde4"},
+ {file = "lazy_object_proxy-1.6.0-cp36-cp36m-win32.whl", hash = "sha256:b579f8acbf2bdd9ea200b1d5dea36abd93cabf56cf626ab9c744a432e15c815f"},
+ {file = "lazy_object_proxy-1.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:4f60460e9f1eb632584c9685bccea152f4ac2130e299784dbaf9fae9f49891b3"},
+ {file = "lazy_object_proxy-1.6.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d7124f52f3bd259f510651450e18e0fd081ed82f3c08541dffc7b94b883aa981"},
+ {file = "lazy_object_proxy-1.6.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:22ddd618cefe54305df49e4c069fa65715be4ad0e78e8d252a33debf00f6ede2"},
+ {file = "lazy_object_proxy-1.6.0-cp37-cp37m-win32.whl", hash = "sha256:9d397bf41caad3f489e10774667310d73cb9c4258e9aed94b9ec734b34b495fd"},
+ {file = "lazy_object_proxy-1.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a5045889cc2729033b3e604d496c2b6f588c754f7a62027ad4437a7ecc4837"},
+ {file = "lazy_object_proxy-1.6.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:17e0967ba374fc24141738c69736da90e94419338fd4c7c7bef01ee26b339653"},
+ {file = "lazy_object_proxy-1.6.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:410283732af311b51b837894fa2f24f2c0039aa7f220135192b38fcc42bd43d3"},
+ {file = "lazy_object_proxy-1.6.0-cp38-cp38-win32.whl", hash = "sha256:85fb7608121fd5621cc4377a8961d0b32ccf84a7285b4f1d21988b2eae2868e8"},
+ {file = "lazy_object_proxy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:d1c2676e3d840852a2de7c7d5d76407c772927addff8d742b9808fe0afccebdf"},
+ {file = "lazy_object_proxy-1.6.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:b865b01a2e7f96db0c5d12cfea590f98d8c5ba64ad222300d93ce6ff9138bcad"},
+ {file = "lazy_object_proxy-1.6.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:4732c765372bd78a2d6b2150a6e99d00a78ec963375f236979c0626b97ed8e43"},
+ {file = "lazy_object_proxy-1.6.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9698110e36e2df951c7c36b6729e96429c9c32b3331989ef19976592c5f3c77a"},
+ {file = "lazy_object_proxy-1.6.0-cp39-cp39-win32.whl", hash = "sha256:1fee665d2638491f4d6e55bd483e15ef21f6c8c2095f235fef72601021e64f61"},
+ {file = "lazy_object_proxy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:f5144c75445ae3ca2057faac03fda5a902eff196702b0a24daf1d6ce0650514b"},
]
livereload = [
{file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"},
@@ -1603,6 +1611,41 @@ pathspec = [
{file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"},
{file = "pathspec-0.8.1.tar.gz", hash = "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"},
]
+pillow = [
+ {file = "Pillow-8.2.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:dc38f57d8f20f06dd7c3161c59ca2c86893632623f33a42d592f097b00f720a9"},
+ {file = "Pillow-8.2.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a013cbe25d20c2e0c4e85a9daf438f85121a4d0344ddc76e33fd7e3965d9af4b"},
+ {file = "Pillow-8.2.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8bb1e155a74e1bfbacd84555ea62fa21c58e0b4e7e6b20e4447b8d07990ac78b"},
+ {file = "Pillow-8.2.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c5236606e8570542ed424849f7852a0ff0bce2c4c8d0ba05cc202a5a9c97dee9"},
+ {file = "Pillow-8.2.0-cp36-cp36m-win32.whl", hash = "sha256:12e5e7471f9b637762453da74e390e56cc43e486a88289995c1f4c1dc0bfe727"},
+ {file = "Pillow-8.2.0-cp36-cp36m-win_amd64.whl", hash = "sha256:5afe6b237a0b81bd54b53f835a153770802f164c5570bab5e005aad693dab87f"},
+ {file = "Pillow-8.2.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:cb7a09e173903541fa888ba010c345893cd9fc1b5891aaf060f6ca77b6a3722d"},
+ {file = "Pillow-8.2.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0d19d70ee7c2ba97631bae1e7d4725cdb2ecf238178096e8c82ee481e189168a"},
+ {file = "Pillow-8.2.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:083781abd261bdabf090ad07bb69f8f5599943ddb539d64497ed021b2a67e5a9"},
+ {file = "Pillow-8.2.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:c6b39294464b03457f9064e98c124e09008b35a62e3189d3513e5148611c9388"},
+ {file = "Pillow-8.2.0-cp37-cp37m-win32.whl", hash = "sha256:01425106e4e8cee195a411f729cff2a7d61813b0b11737c12bd5991f5f14bcd5"},
+ {file = "Pillow-8.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3b570f84a6161cf8865c4e08adf629441f56e32f180f7aa4ccbd2e0a5a02cba2"},
+ {file = "Pillow-8.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:031a6c88c77d08aab84fecc05c3cde8414cd6f8406f4d2b16fed1e97634cc8a4"},
+ {file = "Pillow-8.2.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:66cc56579fd91f517290ab02c51e3a80f581aba45fd924fcdee01fa06e635812"},
+ {file = "Pillow-8.2.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6c32cc3145928c4305d142ebec682419a6c0a8ce9e33db900027ddca1ec39178"},
+ {file = "Pillow-8.2.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:624b977355cde8b065f6d51b98497d6cd5fbdd4f36405f7a8790e3376125e2bb"},
+ {file = "Pillow-8.2.0-cp38-cp38-win32.whl", hash = "sha256:5cbf3e3b1014dddc45496e8cf38b9f099c95a326275885199f427825c6522232"},
+ {file = "Pillow-8.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:463822e2f0d81459e113372a168f2ff59723e78528f91f0bd25680ac185cf797"},
+ {file = "Pillow-8.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:95d5ef984eff897850f3a83883363da64aae1000e79cb3c321915468e8c6add5"},
+ {file = "Pillow-8.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b91c36492a4bbb1ee855b7d16fe51379e5f96b85692dc8210831fbb24c43e484"},
+ {file = "Pillow-8.2.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:d68cb92c408261f806b15923834203f024110a2e2872ecb0bd2a110f89d3c602"},
+ {file = "Pillow-8.2.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f217c3954ce5fd88303fc0c317af55d5e0204106d86dea17eb8205700d47dec2"},
+ {file = "Pillow-8.2.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:5b70110acb39f3aff6b74cf09bb4169b167e2660dabc304c1e25b6555fa781ef"},
+ {file = "Pillow-8.2.0-cp39-cp39-win32.whl", hash = "sha256:a7d5e9fad90eff8f6f6106d3b98b553a88b6f976e51fce287192a5d2d5363713"},
+ {file = "Pillow-8.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:238c197fc275b475e87c1453b05b467d2d02c2915fdfdd4af126145ff2e4610c"},
+ {file = "Pillow-8.2.0-pp36-pypy36_pp73-macosx_10_10_x86_64.whl", hash = "sha256:0e04d61f0064b545b989126197930807c86bcbd4534d39168f4aa5fda39bb8f9"},
+ {file = "Pillow-8.2.0-pp36-pypy36_pp73-manylinux2010_i686.whl", hash = "sha256:63728564c1410d99e6d1ae8e3b810fe012bc440952168af0a2877e8ff5ab96b9"},
+ {file = "Pillow-8.2.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:c03c07ed32c5324939b19e36ae5f75c660c81461e312a41aea30acdd46f93a7c"},
+ {file = "Pillow-8.2.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:4d98abdd6b1e3bf1a1cbb14c3895226816e666749ac040c4e2554231068c639b"},
+ {file = "Pillow-8.2.0-pp37-pypy37_pp73-manylinux2010_i686.whl", hash = "sha256:aac00e4bc94d1b7813fe882c28990c1bc2f9d0e1aa765a5f2b516e8a6a16a9e4"},
+ {file = "Pillow-8.2.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:22fd0f42ad15dfdde6c581347eaa4adb9a6fc4b865f90b23378aa7914895e120"},
+ {file = "Pillow-8.2.0-pp37-pypy37_pp73-win32.whl", hash = "sha256:e98eca29a05913e82177b3ba3d198b1728e164869c613d76d0de4bde6768a50e"},
+ {file = "Pillow-8.2.0.tar.gz", hash = "sha256:a787ab10d7bb5494e5f76536ac460741788f1fbce851068d73a87ca7c35fc3e1"},
+]
pluggy = [
{file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"},
{file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"},
@@ -1671,8 +1714,8 @@ pyhumps = [
{file = "pyhumps-1.6.1.tar.gz", hash = "sha256:01612603c5ad73a407299d806d30708a3935052276fdd93776953bccc0724e0a"},
]
pylint = [
- {file = "pylint-2.7.2-py3-none-any.whl", hash = "sha256:d09b0b07ba06bcdff463958f53f23df25e740ecd81895f7d2699ec04bbd8dc3b"},
- {file = "pylint-2.7.2.tar.gz", hash = "sha256:0e21d3b80b96740909d77206d741aa3ce0b06b41be375d92e1f3244a274c1f8a"},
+ {file = "pylint-2.7.4-py3-none-any.whl", hash = "sha256:209d712ec870a0182df034ae19f347e725c1e615b2269519ab58a35b3fcbbe7a"},
+ {file = "pylint-2.7.4.tar.gz", hash = "sha256:bd38914c7731cdc518634a8d3c5585951302b6e2b6de60fbb3f7a0220e21eeee"},
]
pymdown-extensions = [
{file = "pymdown-extensions-8.1.1.tar.gz", hash = "sha256:632371fa3bf1b21a0e3f4063010da59b41db049f261f4c0b0872069a9b6d1735"},
@@ -1806,40 +1849,40 @@ soupsieve = [
{file = "soupsieve-2.2.1.tar.gz", hash = "sha256:052774848f448cf19c7e959adf5566904d525f33a3f8b6ba6f6f8f26ec7de0cc"},
]
sqlalchemy = [
- {file = "SQLAlchemy-1.4.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:aed22be55a608787bb6875dbcf3561349a0e88fe33fd88c318c1e5b4eeb2306a"},
- {file = "SQLAlchemy-1.4.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:7e1b0ed6d720750f02333d2f52502dfc2a23185aacc2cc6ce6ec29d28c21397c"},
- {file = "SQLAlchemy-1.4.2-cp27-cp27m-win32.whl", hash = "sha256:9406b96a979ab8d6de5d89f58b1f103c9aeef6fb5367448537a8228619f11258"},
- {file = "SQLAlchemy-1.4.2-cp27-cp27m-win_amd64.whl", hash = "sha256:59ec279f1bd55e1d703e3d4b651600cc463cc3eafa8d8e5a70ab844f736348d4"},
- {file = "SQLAlchemy-1.4.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8cfcfcf2582b19c874fa20d0b75100abe17be80a4c637c0683b4eb919946dfee"},
- {file = "SQLAlchemy-1.4.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:a6b4b7688fe7d251bbae3f9da4a487568bd584d13201bc7591c8639ad01fecdc"},
- {file = "SQLAlchemy-1.4.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0abab6d1044198993256f073340b14c459736777c550a7e914cd00444dcf9c30"},
- {file = "SQLAlchemy-1.4.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:5fb8f6a391992dd6aafe4fdf1dffbf7934fba1f5938593f20b152aa7f9619f82"},
- {file = "SQLAlchemy-1.4.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:97e333260a99d989f2a131aa8aa74140636dfbd030987150cb3748da607ea7db"},
- {file = "SQLAlchemy-1.4.2-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:3fa75c854dba3f9b9c28bc5d88d246f6bc6f20b7480367c65339bcb2864d4707"},
- {file = "SQLAlchemy-1.4.2-cp36-cp36m-win32.whl", hash = "sha256:1b9f3c7b281aa1c3d0c74ef12c4633e5f8358bb94f01be7b964887183fd53e5e"},
- {file = "SQLAlchemy-1.4.2-cp36-cp36m-win_amd64.whl", hash = "sha256:da72e3499bde4548e8b7d7f2ab23ceed09a5bac307bf51057e066c406a0ba2e1"},
- {file = "SQLAlchemy-1.4.2-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:8383292298bb85d7ad79a13c6571aff213b96c49737f3c3af129de63bbfb42c9"},
- {file = "SQLAlchemy-1.4.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4d1447183356c9679853926e81c7ebce3fbca9b1c607ea439975298c72137a36"},
- {file = "SQLAlchemy-1.4.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:ff76d7dbf33f62e30e5a1d1b095d46afcdc49e42cbe33ce12014110147466700"},
- {file = "SQLAlchemy-1.4.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:1ba6922331b3f38e116c9266206b044baf64576e5cebd87917b5ad872d7a025f"},
- {file = "SQLAlchemy-1.4.2-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:d3b2819f4d7ae56191efc6fc456eb1805ada2bd5ba93d918893bc24fa7a1e30c"},
- {file = "SQLAlchemy-1.4.2-cp37-cp37m-win32.whl", hash = "sha256:3b290ff34de625143a05d2d172a88a064bb04a7938265b09d4e4bf45f21948f6"},
- {file = "SQLAlchemy-1.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:5289cafee71037f15feeeaf736f01910b9e3572525b73b201bdd21816db010ed"},
- {file = "SQLAlchemy-1.4.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:0bb04fd7414718fb1f4dfa17efcb0be787363451cf99a5e992728925d298d9ae"},
- {file = "SQLAlchemy-1.4.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:4e88549a5e58ba8c80c5ea071ac3b4e590236672a882bb80f56da4afcee45d96"},
- {file = "SQLAlchemy-1.4.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:edec945ed57d11a1123657e4066f0bf747aaa93c8a65ec1c2c98172d1f2a9b7d"},
- {file = "SQLAlchemy-1.4.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:06125670280111e39014af87f14d74599fd4b39a512c74f1a10e21e5626eb158"},
- {file = "SQLAlchemy-1.4.2-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:e1692bdf1b95c97caab1201773a4576f59627997f598d30bdadc50dd9f897fec"},
- {file = "SQLAlchemy-1.4.2-cp38-cp38-win32.whl", hash = "sha256:65c4df9517da9cce2c1255282d3e39f2afbc3a02deba60d99b0a3283ae80ec0b"},
- {file = "SQLAlchemy-1.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:c6197c88ad53c31f58de5a8180936b8ef027356e788cd5f6514b3439d3d897ac"},
- {file = "SQLAlchemy-1.4.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:6d6115edf1297bfa58994986ffe0dff21af18f0cba51dfa6d1769aa8a277be32"},
- {file = "SQLAlchemy-1.4.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:facacaea95e0822f7bbeaa6909b30b2836b14cff8790209d52a0c866e240b673"},
- {file = "SQLAlchemy-1.4.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:6e517126d3bc13d455826befdc35a89f82f01d163848f68db02caa80d25433fc"},
- {file = "SQLAlchemy-1.4.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:09b08eb1bea621e47c2b0fcb0334fcbb00e1da2a3c2d45a98e56cd072b840719"},
- {file = "SQLAlchemy-1.4.2-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:7eba42098a13a3bcd509080b5e44d73783d9129ba0383793979bf518d01e8bb3"},
- {file = "SQLAlchemy-1.4.2-cp39-cp39-win32.whl", hash = "sha256:920db115eb06fc507fe2c774fb5c82a898b05dffbdadc7fafad51ce2cfd8c549"},
- {file = "SQLAlchemy-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:dcde5067a7dab1ff2eaea2f3622b2055c5225ce2aaf589c5a4c703d43519c4ba"},
- {file = "SQLAlchemy-1.4.2.tar.gz", hash = "sha256:6a8e4c2e65028933a6dc8643c8f5a4f295a367131195b3c708634925cb3e8ec1"},
+ {file = "SQLAlchemy-1.4.5-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:c3810ebcf1d42c532c8f5c3f442c705d94442a27a32f2df5344f0857306ab321"},
+ {file = "SQLAlchemy-1.4.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:7481f9c2c832a3bf37c80bee44d91ac9938b815cc06f7e795b976e300914aab9"},
+ {file = "SQLAlchemy-1.4.5-cp27-cp27m-win32.whl", hash = "sha256:94040a92b6676f9ffdab6c6b479b3554b927a635c90698c761960b266b04fc88"},
+ {file = "SQLAlchemy-1.4.5-cp27-cp27m-win_amd64.whl", hash = "sha256:02b039e0e7e6de2f15ea2d2de3995e31a170e700ec0b37b4eded662171711d19"},
+ {file = "SQLAlchemy-1.4.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:f16801795f1ffe9472360589a04301018c79e4582a85e68067275bb4f765e4e2"},
+ {file = "SQLAlchemy-1.4.5-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:82f11b679df91275788be6734dd4a9dfa29bac67b85326992609f62b05bdab37"},
+ {file = "SQLAlchemy-1.4.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:a08027ae84efc563f0f2f341dda572eadebeca38c0ae028a009988f27e9e6230"},
+ {file = "SQLAlchemy-1.4.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:70a1387396ea5b3022539b560c287daf79403d8b4b365f89b56d660e625a4457"},
+ {file = "SQLAlchemy-1.4.5-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:4f7ce3bfdab6520554af4a5b1df4513d45388624d015ba4d921daf48ce1d6503"},
+ {file = "SQLAlchemy-1.4.5-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:08943201a1e3c6238e48f4d5d56c27ea1e1b39d3d9f36a9d81fc3cfb0e1b83bd"},
+ {file = "SQLAlchemy-1.4.5-cp36-cp36m-win32.whl", hash = "sha256:fbb0fda1c574975807aceb0e2332e0ecfe9e5656c191ed482c1a5eafe7a33823"},
+ {file = "SQLAlchemy-1.4.5-cp36-cp36m-win_amd64.whl", hash = "sha256:8d6a9feb5efd2fdab25c6d5a0a5589fed9d789f5ec57ec12263fd0e60ce1dea6"},
+ {file = "SQLAlchemy-1.4.5-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:c22bfac8d3b955cdb13f0fcd6343156bf56d925196cf7d9ab9ce9f61d3f1e11c"},
+ {file = "SQLAlchemy-1.4.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:7c0c7bb49167ac738ca6ee6e7f94a9988a7e4e261d8da335341e8c8c8f3b2e9b"},
+ {file = "SQLAlchemy-1.4.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:344b58b4b4193b72e8b768a51ef6eb5a4c948ce313a0f23e2ea081e71ce8ac0e"},
+ {file = "SQLAlchemy-1.4.5-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:48540072f43b3c080159ec1f24a4b014c0ee83d3b73795399974aa358a8cf71b"},
+ {file = "SQLAlchemy-1.4.5-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:81badd7d3e0e6aba70a5d1b50fabe8112e9835a6fdb0684054c3fe5378ce0d01"},
+ {file = "SQLAlchemy-1.4.5-cp37-cp37m-win32.whl", hash = "sha256:a103294583383660d9e06dbd82037dc8e94c184bdcb27b2be44ae4457dafc6b4"},
+ {file = "SQLAlchemy-1.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:5361e25181b9872d6906c8c9be7dc05cb0a0951d71ee59ee5a71c1deb301b8a8"},
+ {file = "SQLAlchemy-1.4.5-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:7f5087104c3c5af11ea59e49ae66c33ca98b14a47d3796ae97498fca53f84aef"},
+ {file = "SQLAlchemy-1.4.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:11e7a86209f69273e75d2dd64b06c0c2660e39cd942fce2170515c404ed7358a"},
+ {file = "SQLAlchemy-1.4.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:8301ecf3e819eb5dbc171e84654ff60872807775301a55fe35b0ab2ba3742031"},
+ {file = "SQLAlchemy-1.4.5-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:44e11a06168782b6d485daef197783366ce7ab0d5eea0066c899ae06cef47bbc"},
+ {file = "SQLAlchemy-1.4.5-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:6f8fdad2f335d2f3ca2f3ee3b01404f7abcf519b03de2c510f1f42d16e39ffb4"},
+ {file = "SQLAlchemy-1.4.5-cp38-cp38-win32.whl", hash = "sha256:f62c57ceadedeb8e7b98b48ac4d684bf2b0f73b9d882fed3ca260d9aedf6403f"},
+ {file = "SQLAlchemy-1.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:301d0cd6ef1dc73b607748183da857e712d6f743de8d92b1e1f8facfb0ba2aa2"},
+ {file = "SQLAlchemy-1.4.5-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:915d4fa08776c0252dc5a34fa15c6490f66f411ea1ac9492022f98875d6baf20"},
+ {file = "SQLAlchemy-1.4.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7de84feb31af3d8fdf819cac2042928d0b60d3cb16f49c4b2f48d88db46e79f6"},
+ {file = "SQLAlchemy-1.4.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:45b091ccbf94374ed14abde17e9a04522b0493a17282eaaf4383efdd413f5243"},
+ {file = "SQLAlchemy-1.4.5-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:4df07161897191ed8d4a0cfc92425c81296160e5c5f76c9256716d3085172883"},
+ {file = "SQLAlchemy-1.4.5-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:ee4ddc904fb6414b5118af5b8d45e428aac2ccda01326b2ba2fe4354b0d8d1ae"},
+ {file = "SQLAlchemy-1.4.5-cp39-cp39-win32.whl", hash = "sha256:2f11b5783933bff55291ca06496124347627d211ff2e509e846af1c35de0a3fb"},
+ {file = "SQLAlchemy-1.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:0ee0054d4a598d2920cae14bcbd33e200e02c5e3b47b902627f8cf5d4c9a2a4b"},
+ {file = "SQLAlchemy-1.4.5.tar.gz", hash = "sha256:1294f05916c044631fd626a4866326bbfbd17f62bd37510d000afaef4b35bd74"},
]
starlette = [
{file = "starlette-0.13.6-py3-none-any.whl", hash = "sha256:bd2ffe5e37fb75d014728511f8e68ebf2c80b0fa3d04ca1479f4dc752ae31ac9"},
diff --git a/pyproject.toml b/pyproject.toml
index cdb3721fde25..5af7567e6fea 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -30,6 +30,8 @@ bcrypt = "^3.2.0"
python-jose = "^3.2.0"
passlib = "^1.7.4"
lxml = "4.6.2"
+Pillow = "^8.2.0"
+
[tool.poetry.dev-dependencies]
pylint = "^2.6.0"
diff --git a/tests/unit_tests/test_cleaner.py b/tests/unit_tests/test_cleaner.py
index 9c805cbb6055..b1630a1324a9 100644
--- a/tests/unit_tests/test_cleaner.py
+++ b/tests/unit_tests/test_cleaner.py
@@ -89,3 +89,11 @@ def test_html_with_recipe_data():
assert url_validation_regex.match(recipe_data["image"])
assert len(recipe_data["recipeIngredient"]) == 13
assert len(recipe_data["recipeInstructions"]) == 4
+
+
+def test_time_cleaner():
+
+ my_time_delta = "PT2H30M"
+ return_delta = Cleaner.time(my_time_delta)
+
+ assert return_delta == "2 Hours 30 Minutes"
diff --git a/tests/unit_tests/test_config.py b/tests/unit_tests/test_config.py
index fc88f11b8d29..c2641b428c43 100644
--- a/tests/unit_tests/test_config.py
+++ b/tests/unit_tests/test_config.py
@@ -4,19 +4,39 @@ import pytest
from mealie.core.config import CWD, DATA_DIR, AppDirectories, AppSettings, determine_data_dir, determine_secrets
+def test_default_settings(monkeypatch):
+ monkeypatch.delenv("DEFAULT_GROUP", raising=False)
+ monkeypatch.delenv("DEFAULT_PASSWORD", raising=False)
+ monkeypatch.delenv("API_PORT", raising=False)
+ monkeypatch.delenv("API_DOCS", raising=False)
+ monkeypatch.delenv("DB_TYPE", raising=False)
+ monkeypatch.delenv("IS_DEMO", raising=False)
+
+ app_settings = AppSettings()
+
+ assert app_settings.DEFAULT_GROUP == "Home"
+ assert app_settings.DEFAULT_PASSWORD == "MyPassword"
+ assert app_settings.DATABASE_TYPE == "sqlite"
+ assert app_settings.API_PORT == 9000
+ assert app_settings.API_DOCS is True
+ assert app_settings.IS_DEMO is False
+
+ assert app_settings.REDOC_URL == "/redoc"
+ assert app_settings.DOCS_URL == "/docs"
+
+
def test_non_default_settings(monkeypatch):
monkeypatch.setenv("DEFAULT_GROUP", "Test Group")
monkeypatch.setenv("DEFAULT_PASSWORD", "Test Password")
monkeypatch.setenv("API_PORT", "8000")
- monkeypatch.setenv("API_DOCS", False)
+    monkeypatch.setenv("API_DOCS", "False")
- app_dirs = AppDirectories(CWD, DATA_DIR)
- app_settings = AppSettings(app_dirs)
+ app_settings = AppSettings()
assert app_settings.DEFAULT_GROUP == "Test Group"
assert app_settings.DEFAULT_PASSWORD == "Test Password"
assert app_settings.API_PORT == 8000
- assert app_settings.API is False
+ assert app_settings.API_DOCS is False
assert app_settings.REDOC_URL is None
assert app_settings.DOCS_URL is None
@@ -25,9 +45,8 @@ def test_non_default_settings(monkeypatch):
def test_unknown_database(monkeypatch):
monkeypatch.setenv("DB_TYPE", "nonsense")
- with pytest.raises(Exception, match="Unable to determine database type. Acceptible options are 'sqlite'"):
- app_dirs = AppDirectories(CWD, DATA_DIR)
- AppSettings(app_dirs)
+ with pytest.raises(ValueError, match="Unable to determine database type. Acceptible options are 'sqlite'"):
+ AppSettings()
def test_secret_generation(tmp_path):
diff --git a/tests/unit_tests/test_nextcloud.py b/tests/unit_tests/test_nextcloud.py
index 3e446e76c695..bb773fc39078 100644
--- a/tests/unit_tests/test_nextcloud.py
+++ b/tests/unit_tests/test_nextcloud.py
@@ -1,3 +1,4 @@
+import shutil
from pathlib import Path
import pytest
@@ -36,4 +37,4 @@ def test_zip_extraction(file_name: str, final_path: Path):
def test_nextcloud_migration(recipe_dir: Path):
recipe = import_recipes(recipe_dir)
assert isinstance(recipe, Recipe)
- app_dirs.IMG_DIR.joinpath(recipe.image).unlink(missing_ok=True)
+ shutil.rmtree(app_dirs.IMG_DIR.joinpath(recipe.image), ignore_errors=True)