Merge branch 'mealie-next' into feat-frontend-access-controll

commit 058d968833
Kuchenpirat, 2024-02-13 18:44:01 +01:00, committed by GitHub
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
193 changed files with 1808 additions and 990 deletions


@@ -49,7 +49,9 @@
     "onCreateCommand": "sudo chown -R vscode:vscode /workspaces/mealie/frontend/node_modules && task setup",
     // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
     "remoteUser": "vscode",
-    // "features": {
-    //     "git": "latest"
-    // }
+    "features": {
+        "ghcr.io/devcontainers/features/docker-in-docker:2": {
+            "dockerDashComposeVersion": "v2"
+        }
+    }
 }


@@ -49,6 +49,9 @@ jobs:
     needs:
       - build-release
     runs-on: ubuntu-latest
+    permissions:
+      contents: write
+      pull-requests: write
     steps:
       - name: Checkout 🛎
        uses: actions/checkout@v4
@@ -58,11 +61,12 @@ jobs:
           sed -i 's/:v[0-9]*.[0-9]*.[0-9]*/:${{ github.event.release.tag_name }}/' docs/docs/documentation/getting-started/installation/sqlite.md
           sed -i 's/:v[0-9]*.[0-9]*.[0-9]*/:${{ github.event.release.tag_name }}/' docs/docs/documentation/getting-started/installation/postgres.md
-      - name: Commit updates
-        uses: test-room-7/action-update-file@v1
+      - name: Create Pull Request
+        uses: peter-evans/create-pull-request@v6
         with:
-          file-path: |
-            docs/docs/documentation/getting-started/installation/sqlite.md
-            docs/docs/documentation/getting-started/installation/postgres.md
-          commit-msg: "Change image tag, for release ${{ github.event.release.tag_name }}"
-          github-token: ${{ secrets.GITHUB_TOKEN }}
+          commit-message: "Update image tag, for release ${{ github.event.release.tag_name }}"
+          branch: "docs/newrelease-update-version-${{ github.event.release.tag_name }}"
+          delete-branch: true
+          base: mealie-next
+          title: "docs(auto): Update image tag, for release ${{ github.event.release.tag_name }}"
+          body: "Auto-generated by `.github/workflows/release.yml`, on publish of release ${{ github.event.release.tag_name }}"


@@ -14,7 +14,9 @@ env:
   SMTP_HOST: localhost
   SMTP_PORT: 1025
   SMTP_FROM_NAME: MealieDev
+  SMTP_FROM_EMAIL: mealie@example.com
   SMTP_AUTH_STRATEGY: NONE
+  BASE_URL: http://localhost:3000
   LANG: en-US
   # loads .env file if it exists


@@ -54,8 +54,8 @@ Changing the webworker settings may cause unforeseen memory leak issues with Mea
 | ---------------- | :-----: | --------------------------------------------------------------------------------------------------------------------------------- |
 | WEB_GUNICORN     | false   | Enables Gunicorn to manage Uvicorn web for multiple works                                                                           |
 | WORKERS_PER_CORE | 1       | Set the number of workers to the number of CPU cores multiplied by this value (Value \* CPUs). More info [here][workers_per_core]  |
-| MAX_WORKERS      | 1       | Set the maximum number of workers to use. Default is not set meaning unlimited. More info [here][max_workers]                      |
-| WEB_CONCURRENCY  | 1       | Override the automatic definition of number of workers. More info [here][web_concurrency]                                          |
+| MAX_WORKERS      | None    | Set the maximum number of workers to use. Default is not set meaning unlimited. More info [here][max_workers]                      |
+| WEB_CONCURRENCY  | 2       | Override the automatic definition of number of workers. More info [here][web_concurrency]                                          |

 ### LDAP
@@ -95,3 +95,8 @@ Setting the following environmental variables will change the theme of the front
 | THEME_DARK_INFO    | #1976D2 | Dark Theme Config Variable |
 | THEME_DARK_WARNING | #FF6D00 | Dark Theme Config Variable |
 | THEME_DARK_ERROR   | #EF5350 | Dark Theme Config Variable |
+
+[workers_per_core]: https://github.com/tiangolo/uvicorn-gunicorn-docker/blob/2daa3e3873c837d5781feb4ff6a40a89f791f81b/README.md#workers_per_core
+[max_workers]: https://github.com/tiangolo/uvicorn-gunicorn-docker/blob/2daa3e3873c837d5781feb4ff6a40a89f791f81b/README.md#max_workers
+[web_concurrency]: https://github.com/tiangolo/uvicorn-gunicorn-docker/blob/2daa3e3873c837d5781feb4ff6a40a89f791f81b/README.md#web_concurrency
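For orientation, the three worker variables documented above interact roughly as sketched below in the uvicorn-gunicorn image that the table links to. This is a hedged sketch of that resolution order, not Mealie code; the function name is illustrative and the authoritative logic lives in that image's gunicorn_conf.py.

```python
import multiprocessing
import os


def resolve_worker_count() -> int:
    """Rough sketch of how WORKERS_PER_CORE, MAX_WORKERS and WEB_CONCURRENCY interact."""
    cores = multiprocessing.cpu_count()
    workers_per_core = float(os.getenv("WORKERS_PER_CORE", "1"))
    web_concurrency = os.getenv("WEB_CONCURRENCY")
    max_workers = os.getenv("MAX_WORKERS")

    if web_concurrency:  # an explicit override wins outright
        return int(web_concurrency)

    workers = max(int(workers_per_core * cores), 2)  # never fewer than 2 by default
    if max_workers:  # optional upper bound, unlimited when unset
        workers = min(workers, int(max_workers))
    return workers
```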

File diff suppressed because one or more lines are too long


@@ -19,11 +19,11 @@
       </div>
     </v-card-text>
     <v-list v-if="showViewer" dense class="mt-0 pt-0">
-      <v-list-item v-for="(item, key, index) in labels" :key="index" style="min-height: 25px" dense>
+      <v-list-item v-for="(item, key, index) in renderedList" :key="index" style="min-height: 25px" dense>
        <v-list-item-content>
          <v-list-item-title class="pl-4 caption flex row">
            <div>{{ item.label }}</div>
-            <div class="ml-auto mr-1">{{ value[key] }}</div>
+            <div class="ml-auto mr-1">{{ item.value }}</div>
            <div>{{ item.suffix }}</div>
          </v-list-item-title>
        </v-list-item-content>
@@ -37,6 +37,14 @@
 import { computed, defineComponent, useContext } from "@nuxtjs/composition-api";
 import { Nutrition } from "~/lib/api/types/recipe";

+type NutritionLabelType = {
+  [key: string]: {
+    label: string;
+    suffix: string;
+    value?: string;
+  };
+};
+
 export default defineComponent({
   props: {
     value: {
@@ -50,34 +58,34 @@ export default defineComponent({
   },
   setup(props, context) {
     const { i18n } = useContext();
-    const labels = {
+    const labels = <NutritionLabelType>{
       calories: {
-        label: i18n.t("recipe.calories"),
-        suffix: i18n.t("recipe.calories-suffix"),
+        label: i18n.tc("recipe.calories"),
+        suffix: i18n.tc("recipe.calories-suffix"),
       },
       fatContent: {
-        label: i18n.t("recipe.fat-content"),
-        suffix: i18n.t("recipe.grams"),
+        label: i18n.tc("recipe.fat-content"),
+        suffix: i18n.tc("recipe.grams"),
      },
      fiberContent: {
-        label: i18n.t("recipe.fiber-content"),
-        suffix: i18n.t("recipe.grams"),
+        label: i18n.tc("recipe.fiber-content"),
+        suffix: i18n.tc("recipe.grams"),
      },
      proteinContent: {
-        label: i18n.t("recipe.protein-content"),
-        suffix: i18n.t("recipe.grams"),
+        label: i18n.tc("recipe.protein-content"),
+        suffix: i18n.tc("recipe.grams"),
      },
      sodiumContent: {
-        label: i18n.t("recipe.sodium-content"),
-        suffix: i18n.t("recipe.milligrams"),
+        label: i18n.tc("recipe.sodium-content"),
+        suffix: i18n.tc("recipe.milligrams"),
      },
      sugarContent: {
-        label: i18n.t("recipe.sugar-content"),
-        suffix: i18n.t("recipe.grams"),
+        label: i18n.tc("recipe.sugar-content"),
+        suffix: i18n.tc("recipe.grams"),
      },
      carbohydrateContent: {
-        label: i18n.t("recipe.carbohydrate-content"),
-        suffix: i18n.t("recipe.grams"),
+        label: i18n.tc("recipe.carbohydrate-content"),
+        suffix: i18n.tc("recipe.grams"),
      },
    };

    const valueNotNull = computed(() => {
@@ -96,11 +104,25 @@ export default defineComponent({
       context.emit("input", { ...props.value, [key]: event });
     }

+    // Build a new list that only contains nutritional information that has a value
+    const renderedList = computed(() => {
+      return Object.entries(labels).reduce((item: NutritionLabelType, [key, label]) => {
+        if (props.value[key]?.trim()) {
+          item[key] = {
+            ...label,
+            value: props.value[key],
+          };
+        }
+        return item;
+      }, {});
+    });
+
     return {
       labels,
       valueNotNull,
       showViewer,
       updateValue,
+      renderedList,
     };
   },
 });


@@ -200,7 +200,7 @@
     "created-on-date": "Създадено на {0}",
     "unsaved-changes": "Имате незапазени промени. Желаете ли да ги запазите преди да излезете? Натиснете Ок за запазване и Отказ за отхвърляне на промените.",
     "clipboard-copy-failure": "Линкът към рецептата е копиран в клипборда.",
-    "confirm-delete-generic-items": "Are you sure you want to delete the following items?"
+    "confirm-delete-generic-items": "Сигурни ли сте, че желаете да изтриете следните елементи?"
   },
   "group": {
     "are-you-sure-you-want-to-delete-the-group": "Сигурни ли сте, че искате да изтриете <b>{groupName}<b/>?",
@@ -259,7 +259,7 @@
   },
   "meal-plan": {
     "create-a-new-meal-plan": "Създаване на нов хранителен план",
-    "update-this-meal-plan": "Update this Meal Plan",
+    "update-this-meal-plan": "Обнови този План за хранене",
     "dinner-this-week": "Вечеря тази седмица",
     "dinner-today": "Вечеря Днес",
     "dinner-tonight": "Вечеря ТАЗИ ВЕЧЕР",
@@ -474,11 +474,11 @@
     "add-to-timeline": "Добави към времевата линия",
     "recipe-added-to-list": "Рецептата е добавена към списъка",
     "recipes-added-to-list": "Рецептите са добавени към списъка",
-    "successfully-added-to-list": "Successfully added to list",
+    "successfully-added-to-list": "Успешно добавено в списъка",
     "recipe-added-to-mealplan": "Рецептата е добавена към хранителния план",
     "failed-to-add-recipes-to-list": "Неуспешно добавяне на рецепта към списъка",
     "failed-to-add-recipe-to-mealplan": "Рецептата не беше добавена към хранителния план",
-    "failed-to-add-to-list": "Failed to add to list",
+    "failed-to-add-to-list": "Неуспешно добавяне към списъка",
     "yield": "Добив",
     "quantity": "Количество",
     "choose-unit": "Избери единица",
@@ -537,8 +537,8 @@
     "new-recipe-names-must-be-unique": "Името на рецептата трябва да бъде уникално",
     "scrape-recipe": "Обхождане на рецепта",
     "scrape-recipe-description": "Обходи рецепта по линк. Предоставете линк за сайт, който искате да бъде обходен. Mealie ще опита да обходи рецептата от този сайт и да я добави във Вашата колекция.",
-    "scrape-recipe-have-a-lot-of-recipes": "Have a lot of recipes you want to scrape at once?",
-    "scrape-recipe-suggest-bulk-importer": "Try out the bulk importer",
+    "scrape-recipe-have-a-lot-of-recipes": "Имате много рецепти, които искате да обходите наведнъж?",
+    "scrape-recipe-suggest-bulk-importer": "Пробвайте масовото импорторане",
     "import-original-keywords-as-tags": "Импортирай оригиналните ключови думи като тагове",
     "stay-in-edit-mode": "Остани в режим на редакция",
     "import-from-zip": "Импортирай от Zip",
@@ -562,7 +562,7 @@
     "upload-image": "Качване на изображение",
     "screen-awake": "Запази екрана активен",
     "remove-image": "Премахване на изображение",
-    "nextStep": "Next step"
+    "nextStep": "Следваща стъпка"
   },
   "search": {
     "advanced-search": "Разширено търсене",
@@ -1187,7 +1187,7 @@
     "require-all-tools": "Изискване на всички инструменти",
     "cookbook-name": "Име на книгата с рецепти",
     "cookbook-with-name": "Книга с рецепти {0}",
-    "create-a-cookbook": "Create a Cookbook",
-    "cookbook": "Cookbook"
+    "create-a-cookbook": "Създай Готварска книга",
+    "cookbook": "Готварска книга"
   }
 }


@@ -200,7 +200,7 @@
     "created-on-date": "נוצר ב-{0}",
     "unsaved-changes": "יש שינויים שלא נשמרו. לצאת לפני שמירה? אשר לשמירה, בטל למחיקת שינויים.",
     "clipboard-copy-failure": "כשלון בהעתקה ללוח ההדבקה.",
-    "confirm-delete-generic-items": "Are you sure you want to delete the following items?"
+    "confirm-delete-generic-items": "האם אתה בטוח שברצונך למחוק את הפריטים הנבחרים?"
   },
   "group": {
     "are-you-sure-you-want-to-delete-the-group": "האם את/ה בטוח/ה שברצונך למחוק את <b>{groupName}<b/>?",
@@ -259,7 +259,7 @@
   },
   "meal-plan": {
     "create-a-new-meal-plan": "יצירת תכנית ארוחות חדשה",
-    "update-this-meal-plan": "Update this Meal Plan",
+    "update-this-meal-plan": "עדכן את תכנון הארוחות",
     "dinner-this-week": "ארוחות ערב השבוע",
     "dinner-today": "ארוחת ערב היום",
     "dinner-tonight": "ארוחת ערב היום",
@@ -474,11 +474,11 @@
     "add-to-timeline": "הוסף לציר הזמן",
     "recipe-added-to-list": "מתכון נוסף לרשימה",
     "recipes-added-to-list": "מתכונים הוספו לרשימה",
-    "successfully-added-to-list": "Successfully added to list",
+    "successfully-added-to-list": "נוסף לרשימה בהצלחה",
     "recipe-added-to-mealplan": "מתכון נוסף לתכנון ארוחות",
     "failed-to-add-recipes-to-list": "כשלון בהוספת מתכון לרשימה",
     "failed-to-add-recipe-to-mealplan": "הוספת מתכון לתכנון ארוחות נכשלה",
-    "failed-to-add-to-list": "Failed to add to list",
+    "failed-to-add-to-list": "כשלון בהוספה לרשימה",
     "yield": "תשואה",
     "quantity": "כמות",
     "choose-unit": "בחירת יחידת מידה",
@@ -515,7 +515,7 @@
     "how-did-it-turn-out": "איך יצא?",
     "user-made-this": "{user} הכין את זה",
     "last-made-date": "נעשה לאחרונה ב{date}",
-    "api-extras-description": "Recipes extras are a key feature of the Mealie API. They allow you to create custom JSON key/value pairs within a recipe, to reference from 3rd party applications. You can use these keys to provide information, for example to trigger automations or custom messages to relay to your desired device.",
+    "api-extras-description": "מתכונים נוספים הם יכולת מפתח של Mealie API. הם מאפשרים ליצור צמדי key/value בצורת JSON על מנת לקרוא אותם בתוכנת צד שלישית. תוכלו להשתמש בצמדים האלה כדי לספק מידע, לדוגמא להפעיל אוטומציות או הודעות מותאמות אישית למכשירים מסויימים.",
     "message-key": "מפתח הודעה",
     "parse": "ניתוח",
     "attach-images-hint": "הוסף תמונות ע\"י גרירה ושחרור אל תוך העורך",
@@ -537,8 +537,8 @@
     "new-recipe-names-must-be-unique": "שם מתכון חדש חייב להיות ייחודי",
     "scrape-recipe": "קריאת מתכון",
     "scrape-recipe-description": "קריאת מתכון בעזרת לינק. ספק את הלינק של האתר שברצונך לקרוא, ומילי תנסה לקרוא את המתכון מהאתר ולהוסיף אותו לאוסף.",
-    "scrape-recipe-have-a-lot-of-recipes": "Have a lot of recipes you want to scrape at once?",
-    "scrape-recipe-suggest-bulk-importer": "Try out the bulk importer",
+    "scrape-recipe-have-a-lot-of-recipes": "יש לך הרבה מתכונים שאתה רוצה לקרוא בבת אחת?",
+    "scrape-recipe-suggest-bulk-importer": "נסה את יכולת קריאת רשימה",
     "import-original-keywords-as-tags": "ייבא שמות מפתח מקוריות כתגיות",
     "stay-in-edit-mode": "השאר במצב עריכה",
     "import-from-zip": "ייבא מקובץ",
@@ -562,7 +562,7 @@
     "upload-image": "העלה תמונה",
     "screen-awake": "השאר את המסך פעיל",
     "remove-image": "האם למחוק את התמונה?",
-    "nextStep": "Next step"
+    "nextStep": "השלב הבא"
   },
   "search": {
     "advanced-search": "חיפוש מתקדם",
@@ -797,7 +797,7 @@
     "untagged-count": "לא מתוייג {count}",
     "create-a-tag": "צור תגית",
     "tag-name": "שם תגית",
-    "tag": "Tag"
+    "tag": "תגית"
   },
   "tool": {
     "tools": "כלים",
@@ -807,7 +807,7 @@
     "create-new-tool": "יצירת כלי חדש",
     "on-hand-checkbox-label": "הראה מה יש לי במטבח",
     "required-tools": "צריך כלים",
-    "tool": "Tool"
+    "tool": "כלי"
   },
   "user": {
     "admin": "אדמין",
@@ -898,10 +898,10 @@
     "user-can-organize-group-data": "משתמש יכול לשנות מידע של קבוצה",
     "enable-advanced-features": "אפשר אפשרויות מתקדמות",
     "it-looks-like-this-is-your-first-time-logging-in": "נראה שזו ההתחברות הראשונה שלך.",
-    "dont-want-to-see-this-anymore-be-sure-to-change-your-email": "Don't want to see this anymore? Be sure to change your email in your user settings!",
+    "dont-want-to-see-this-anymore-be-sure-to-change-your-email": "לא רוצה לראות את זה יותר? דאג לשנות את המייל של בהגדרות המשתמש!",
     "forgot-password": "שכחתי סיסמא",
-    "forgot-password-text": "Please enter your email address and we will send you a link to reset your password.",
-    "changes-reflected-immediately": "Changes to this user will be reflected immediately."
+    "forgot-password-text": "נא לספק כתובת דוא\"ל. אנו נשלח לך הודעת דוא\"ל לצורך איפוס הסיסמה שלך.",
+    "changes-reflected-immediately": "השינויים למשתמש זה יבוצעו מיידית."
   },
   "language-dialog": {
     "translated": "תורגם",
@@ -923,8 +923,8 @@
     "food-label": "תוית אוכל",
     "edit-food": "עריכת מזון",
     "food-data": "נתוני אוכל",
-    "example-food-singular": "ex: Onion",
-    "example-food-plural": "ex: Onions"
+    "example-food-singular": "דוגמא: בצל",
+    "example-food-plural": "דוגמא: בצלים"
   },
   "units": {
     "seed-dialog-text": "אכלס את מסד הנתונים עם יחידות מדידה בהתאם לשפה המקומית שלך.",
@@ -935,7 +935,7 @@
     "merging-unit-into-unit": "ממזג את {0} לתוך {1}",
     "create-unit": "יצירת יחידה",
     "abbreviation": "קיצור",
-    "plural-abbreviation": "Plural Abbreviation",
+    "plural-abbreviation": "צורת הרבית",
     "description": "תיאור",
     "display-as-fraction": "הצגה כשבר",
     "use-abbreviation": "השתמש בקיצור",
@@ -943,10 +943,10 @@
     "unit-data": "נתוני יחידה",
     "use-abbv": "השתמש בקיצור",
     "fraction": "שבר",
-    "example-unit-singular": "ex: Tablespoon",
-    "example-unit-plural": "ex: Tablespoons",
-    "example-unit-abbreviation-singular": "ex: Tbsp",
-    "example-unit-abbreviation-plural": "ex: Tbsps"
+    "example-unit-singular": "דוגמא: כפית",
+    "example-unit-plural": "דוגמא: כפיות",
+    "example-unit-abbreviation-singular": "דוגמא: כף",
+    "example-unit-abbreviation-plural": "דוגמא: כפות"
   },
   "labels": {
     "seed-dialog-text": "אכלס את מסד הנתונים בתגיות נפוצות בהתאם לשפה המקומית שלך.",
@@ -1187,7 +1187,7 @@
     "require-all-tools": "זקוק לכל הכלים",
     "cookbook-name": "שם ספר בישול",
     "cookbook-with-name": "ספר בישול {0}",
-    "create-a-cookbook": "Create a Cookbook",
-    "cookbook": "Cookbook"
+    "create-a-cookbook": "צור ספר בישול חדש",
+    "cookbook": "ספר בישול"
   }
 }


@@ -200,7 +200,7 @@
     "created-on-date": "Gemaakt op {0}",
     "unsaved-changes": "Er zijn niet-opgeslagen wijzigingen. Wil je eerst opslaan voordat je vertrekt? Okay om op te slaan, Annuleren om wijzigingen ongedaan te maken.",
     "clipboard-copy-failure": "Kopiëren naar klembord mislukt.",
-    "confirm-delete-generic-items": "Are you sure you want to delete the following items?"
+    "confirm-delete-generic-items": "Weet u zeker dat u de volgende items wilt verwijderen?"
   },
   "group": {
     "are-you-sure-you-want-to-delete-the-group": "Weet je zeker dat je <b>{groupName}<b/> wil verwijderen?",


@@ -707,7 +707,7 @@
     "email-configured": "Email настроен",
     "email-test-results": "Результаты теста Email",
     "ready": "Готово",
-    "not-ready": "Не готово - Проверьте переменные окружающей среды",
+    "not-ready": "Не готово - Проверьте переменные окружения",
     "succeeded": "Выполнено успешно",
     "failed": "Ошибка",
     "general-about": "Общая информация",


@@ -31,7 +31,24 @@
           <BaseButton type="button" :loading="generatingToken" create @click.prevent="handlePasswordReset">
             {{ $t("user.generate-password-reset-link") }}
           </BaseButton>
-          <AppButtonCopy v-if="resetUrl" :copy-text="resetUrl"></AppButtonCopy>
+        </div>
+        <div v-if="resetUrl" class="mb-2">
+          <v-card-text>
+            <p class="text-center pb-0">
+              {{ resetUrl }}
+            </p>
+          </v-card-text>
+          <v-card-actions class="align-center pt-0" style="gap: 4px">
+            <BaseButton cancel @click="resetUrl = ''"> {{ $t("general.close") }} </BaseButton>
+            <v-spacer></v-spacer>
+            <BaseButton v-if="user.email" color="info" class="mr-1" @click="sendResetEmail">
+              <template #icon>
+                {{ $globals.icons.email }}
+              </template>
+              {{ $t("user.email") }}
+            </BaseButton>
+            <AppButtonCopy :icon="false" color="info" :copy-text="resetUrl" />
+          </v-card-actions>
         </div>

         <AutoForm v-model="user" :items="userForm" update-mode :disabled-fields="disabledFields" />
@@ -46,7 +63,7 @@
 <script lang="ts">
 import { computed, defineComponent, useRoute, onMounted, ref, useContext } from "@nuxtjs/composition-api";
-import { useAdminApi } from "~/composables/api";
+import { useAdminApi, useUserApi } from "~/composables/api";
 import { useGroups } from "~/composables/use-groups";
 import { alert } from "~/composables/use-toast";
 import { useUserForm } from "~/composables/use-users";
@@ -118,6 +135,17 @@ export default defineComponent({
       generatingToken.value = false;
     }

+    const userApi = useUserApi();
+    async function sendResetEmail() {
+      if (!user.value?.email) return;
+      const { response } = await userApi.email.sendForgotPassword({ email: user.value.email });
+      if (response && response.status === 200) {
+        alert.success(i18n.tc("profile.email-sent"));
+      } else {
+        alert.error(i18n.tc("profile.error-sending-email"));
+      }
+    }
+
     return {
       user,
       disabledFields,
@@ -130,6 +158,7 @@ export default defineComponent({
       handlePasswordReset,
       resetUrl,
       generatingToken,
+      sendResetEmail,
     };
   },
 });


@@ -112,7 +112,7 @@ async def system_startup():
     logger.info("-----SYSTEM STARTUP----- \n")
     logger.info("------APP SETTINGS------")
     logger.info(
-        settings.json(
+        settings.model_dump_json(
             indent=4,
             exclude={
                 "SECRET",


@@ -1,7 +1,9 @@
 from abc import ABC, abstractmethod
 from pathlib import Path
+from urllib import parse as urlparse

-from pydantic import BaseModel, BaseSettings, PostgresDsn
+from pydantic import BaseModel, PostgresDsn
+from pydantic_settings import BaseSettings, SettingsConfigDict


 class AbstractDBProvider(ABC):
@@ -38,15 +40,19 @@ class PostgresProvider(AbstractDBProvider, BaseSettings):
     POSTGRES_PORT: str = "5432"
     POSTGRES_DB: str = "mealie"

+    model_config = SettingsConfigDict(arbitrary_types_allowed=True, extra="allow")
+
     @property
     def db_url(self) -> str:
         host = f"{self.POSTGRES_SERVER}:{self.POSTGRES_PORT}"
-        return PostgresDsn.build(
-            scheme="postgresql",
-            user=self.POSTGRES_USER,
-            password=self.POSTGRES_PASSWORD,
-            host=host,
-            path=f"/{self.POSTGRES_DB or ''}",
+        return str(
+            PostgresDsn.build(
+                scheme="postgresql",
+                username=self.POSTGRES_USER,
+                password=urlparse.quote_plus(self.POSTGRES_PASSWORD),
+                host=host,
+                path=f"{self.POSTGRES_DB or ''}",
+            )
         )

     @property
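Besides the import move, three details change in this hunk: Pydantic v2's `PostgresDsn.build()` takes `username` instead of `user`, expects the database path without a leading slash, and returns a URL object rather than a plain string, so the result is wrapped in `str()`; the password is also URL-escaped. A minimal sketch mirroring that call, with made-up credentials:

```python
from urllib import parse as urlparse

from pydantic import PostgresDsn

# Made-up credentials, for illustration only.
password = "p@ss/word"
dsn = PostgresDsn.build(
    scheme="postgresql",
    username="mealie",
    password=urlparse.quote_plus(password),  # escape characters that would break the URL
    host="localhost:5432",
    path="mealie",  # Pydantic v2: database name without a leading slash
)
print(str(dsn))  # build() now returns a URL object, so convert explicitly
```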


@@ -1,7 +1,8 @@
 import secrets
 from pathlib import Path

-from pydantic import BaseSettings, NoneStr, validator
+from pydantic import field_validator
+from pydantic_settings import BaseSettings, SettingsConfigDict

 from mealie.core.settings.themes import Theme
@@ -55,7 +56,8 @@ class AppSettings(BaseSettings):
     SECURITY_USER_LOCKOUT_TIME: int = 24
     "time in hours"

-    @validator("BASE_URL")
+    @field_validator("BASE_URL")
+    @classmethod
     def remove_trailing_slash(cls, v: str) -> str:
         if v and v[-1] == "/":
             return v[:-1]
@@ -100,12 +102,12 @@ class AppSettings(BaseSettings):
     # ===============================================
     # Email Configuration

-    SMTP_HOST: str | None
+    SMTP_HOST: str | None = None
     SMTP_PORT: str | None = "587"
     SMTP_FROM_NAME: str | None = "Mealie"
-    SMTP_FROM_EMAIL: str | None
-    SMTP_USER: str | None
-    SMTP_PASSWORD: str | None
+    SMTP_FROM_EMAIL: str | None = None
+    SMTP_USER: str | None = None
+    SMTP_PASSWORD: str | None = None
     SMTP_AUTH_STRATEGY: str | None = "TLS"  # Options: 'TLS', 'SSL', 'NONE'

     @property
@@ -122,11 +124,11 @@ class AppSettings(BaseSettings):
     @staticmethod
     def validate_smtp(
-        host: str | None,
-        port: str | None,
-        from_name: str | None,
-        from_email: str | None,
-        strategy: str | None,
+        host: str | None = None,
+        port: str | None = None,
+        from_name: str | None = None,
+        from_email: str | None = None,
+        strategy: str | None = None,
         user: str | None = None,
         password: str | None = None,
     ) -> bool:
@@ -143,15 +145,15 @@ class AppSettings(BaseSettings):
     # LDAP Configuration

     LDAP_AUTH_ENABLED: bool = False
-    LDAP_SERVER_URL: NoneStr = None
+    LDAP_SERVER_URL: str | None = None
     LDAP_TLS_INSECURE: bool = False
-    LDAP_TLS_CACERTFILE: NoneStr = None
+    LDAP_TLS_CACERTFILE: str | None = None
     LDAP_ENABLE_STARTTLS: bool = False
-    LDAP_BASE_DN: NoneStr = None
-    LDAP_QUERY_BIND: NoneStr = None
-    LDAP_QUERY_PASSWORD: NoneStr = None
-    LDAP_USER_FILTER: NoneStr = None
-    LDAP_ADMIN_FILTER: NoneStr = None
+    LDAP_BASE_DN: str | None = None
+    LDAP_QUERY_BIND: str | None = None
+    LDAP_QUERY_PASSWORD: str | None = None
+    LDAP_USER_FILTER: str | None = None
+    LDAP_ADMIN_FILTER: str | None = None
     LDAP_ID_ATTRIBUTE: str = "uid"
     LDAP_MAIL_ATTRIBUTE: str = "mail"
     LDAP_NAME_ATTRIBUTE: str = "name"
@@ -173,9 +175,7 @@ class AppSettings(BaseSettings):
     # Testing Config
     TESTING: bool = False

-    class Config:
-        arbitrary_types_allowed = True
+    model_config = SettingsConfigDict(arbitrary_types_allowed=True, extra="allow")


 def app_settings_constructor(data_dir: Path, production: bool, env_file: Path, env_encoding="utf-8") -> AppSettings:
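The same Pydantic v2 migration pattern recurs throughout this file: `BaseSettings` now lives in the separate `pydantic-settings` package, the inner `class Config` becomes a `model_config = SettingsConfigDict(...)` attribute, `@validator` becomes `@field_validator` with an explicit `@classmethod`, and optional fields such as `str | None` need an explicit `= None` default. A condensed, self-contained sketch of the pattern (field names are illustrative, not Mealie's):

```python
from pydantic import field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict


class DemoSettings(BaseSettings):
    model_config = SettingsConfigDict(arbitrary_types_allowed=True, extra="allow")

    BASE_URL: str = "http://localhost:8080"
    SMTP_HOST: str | None = None  # v2 requires an explicit default for optional fields

    @field_validator("BASE_URL")
    @classmethod
    def remove_trailing_slash(cls, v: str) -> str:
        return v[:-1] if v and v[-1] == "/" else v


print(DemoSettings(BASE_URL="http://localhost:8080/").BASE_URL)
```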


@@ -1,4 +1,4 @@
-from pydantic import BaseSettings
+from pydantic_settings import BaseSettings, SettingsConfigDict


 class Theme(BaseSettings):
@@ -17,6 +17,4 @@ class Theme(BaseSettings):
     dark_info: str = "#1976D2"
     dark_warning: str = "#FF6D00"
     dark_error: str = "#EF5350"
-
-    class Config:
-        env_prefix = "theme_"
+    model_config = SettingsConfigDict(env_prefix="theme_", extra="allow")


@@ -1,7 +1,7 @@
 from functools import wraps
 from uuid import UUID

-from pydantic import BaseModel, Field, NoneStr
+from pydantic import BaseModel, ConfigDict, Field
 from sqlalchemy import select
 from sqlalchemy.orm import MANYTOMANY, MANYTOONE, ONETOMANY, Session
 from sqlalchemy.orm.mapper import Mapper
@@ -21,7 +21,7 @@ class AutoInitConfig(BaseModel):
     Config class for `auto_init` decorator.
     """

-    get_attr: NoneStr = None
+    get_attr: str | None = None
     exclude: set = Field(default_factory=_default_exclusion)
     # auto_create: bool = False
@@ -31,16 +31,16 @@ def _get_config(relation_cls: type[SqlAlchemyBase]) -> AutoInitConfig:
     Returns the config for the given class.
     """
     cfg = AutoInitConfig()
-    cfgKeys = cfg.dict().keys()
+    cfgKeys = cfg.model_dump().keys()
     # Get the config for the class
     try:
-        class_config: AutoInitConfig = relation_cls.Config
+        class_config: ConfigDict = relation_cls.model_config
     except AttributeError:
         return cfg
     # Map all matching attributes in Config to all AutoInitConfig attributes
-    for attr in dir(class_config):
+    for attr in class_config:
         if attr in cfgKeys:
-            setattr(cfg, attr, getattr(class_config, attr))
+            setattr(cfg, attr, class_config[attr])

     return cfg
@@ -97,7 +97,7 @@ def handle_one_to_many_list(
             updated_elems.append(existing_elem)

-    new_elems = [safe_call(relation_cls, elem, session=session) for elem in elems_to_create]
+    new_elems = [safe_call(relation_cls, elem.copy(), session=session) for elem in elems_to_create]

     return new_elems + updated_elems
@@ -164,7 +164,7 @@ def auto_init():  # sourcery no-metrics
                     setattr(self, key, instances)

                 elif relation_dir == ONETOMANY:
-                    instance = safe_call(relation_cls, val, session=session)
+                    instance = safe_call(relation_cls, val.copy() if val else None, session=session)
                     setattr(self, key, instance)

             elif relation_dir == MANYTOONE and not use_list:
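Because Pydantic v2 replaces the nested `class Config` with a plain `model_config` dict, `_get_config` above now iterates dictionary keys instead of calling `dir()` on a class; Mealie stores its own `exclude`/`get_attr` options in that dict even though standard `ConfigDict` typing does not declare them. A minimal sketch of that lookup, with illustrative option values rather than a real model:

```python
from pydantic import BaseModel, ConfigDict, Field


class AutoInitConfig(BaseModel):
    get_attr: str | None = None
    exclude: set = Field(default_factory=set)


# A SQLAlchemy model class would normally carry this attribute; the extra keys
# are not checked at runtime because ConfigDict is a TypedDict.
model_config = ConfigDict(exclude={"assets", "notes"}, get_attr="slug")

cfg = AutoInitConfig()
cfg_keys = cfg.model_dump().keys()
for attr in model_config:      # model_config is just a dict in Pydantic v2
    if attr in cfg_keys:       # copy only the keys AutoInitConfig understands
        setattr(cfg, attr, model_config[attr])

print(cfg.get_attr, cfg.exclude)
```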


@@ -29,12 +29,15 @@ def get_valid_call(func: Callable, args_dict) -> dict:
     return {k: v for k, v in args_dict.items() if k in valid_args}


-def safe_call(func, dict_args: dict, **kwargs) -> Any:
+def safe_call(func, dict_args: dict | None, **kwargs) -> Any:
     """
     Safely calls the supplied function with the supplied dictionary of arguments.
     by removing any invalid arguments.
     """
+    if dict_args is None:
+        dict_args = {}
+
     if kwargs:
         dict_args.update(kwargs)


@@ -2,6 +2,7 @@ from typing import TYPE_CHECKING, Optional

 import sqlalchemy as sa
 import sqlalchemy.orm as orm
+from pydantic import ConfigDict
 from sqlalchemy import select
 from sqlalchemy.orm import Mapped, mapped_column
 from sqlalchemy.orm.session import Session
@@ -79,9 +80,8 @@ class Group(SqlAlchemyBase, BaseMixins):
     ingredient_foods: Mapped[list["IngredientFoodModel"]] = orm.relationship("IngredientFoodModel", **common_args)
     tools: Mapped[list["Tool"]] = orm.relationship("Tool", **common_args)
     tags: Mapped[list["Tag"]] = orm.relationship("Tag", **common_args)
-
-    class Config:
-        exclude = {
+    model_config = ConfigDict(
+        exclude={
             "users",
             "webhooks",
             "shopping_lists",
@@ -91,6 +91,7 @@ class Group(SqlAlchemyBase, BaseMixins):
             "mealplans",
             "data_exports",
         }
+    )

     @auto_init()
     def __init__(self, **_) -> None:


@@ -1,6 +1,7 @@
 from datetime import datetime
 from typing import TYPE_CHECKING

+from pydantic import ConfigDict
 from sqlalchemy import ForeignKey, orm
 from sqlalchemy.orm import Mapped, mapped_column
 from sqlalchemy.sql.sqltypes import Boolean, DateTime, String
@@ -47,9 +48,7 @@ class ReportModel(SqlAlchemyBase, BaseMixins):
     # Relationships
     group_id: Mapped[GUID] = mapped_column(GUID, ForeignKey("groups.id"), nullable=False, index=True)
     group: Mapped["Group"] = orm.relationship("Group", back_populates="group_reports", single_parent=True)
-
-    class Config:
-        exclude = ["entries"]
+    model_config = ConfigDict(exclude=["entries"])

     @auto_init()
     def __init__(self, **_) -> None:


@@ -1,5 +1,6 @@
 from typing import TYPE_CHECKING, Optional

+from pydantic import ConfigDict
 from sqlalchemy import Boolean, Float, ForeignKey, Integer, String, UniqueConstraint, orm
 from sqlalchemy.ext.orderinglist import ordering_list
 from sqlalchemy.orm import Mapped, mapped_column
@@ -69,9 +70,7 @@ class ShoppingListItem(SqlAlchemyBase, BaseMixins):
     recipe_references: Mapped[list[ShoppingListItemRecipeReference]] = orm.relationship(
         ShoppingListItemRecipeReference, cascade="all, delete, delete-orphan"
     )
-
-    class Config:
-        exclude = {"id", "label", "food", "unit"}
+    model_config = ConfigDict(exclude={"id", "label", "food", "unit"})

     @api_extras
     @auto_init()
@@ -91,9 +90,7 @@ class ShoppingListRecipeReference(BaseMixins, SqlAlchemyBase):
     )

     recipe_quantity: Mapped[float] = mapped_column(Float, nullable=False)
-
-    class Config:
-        exclude = {"id", "recipe"}
+    model_config = ConfigDict(exclude={"id", "recipe"})

     @auto_init()
     def __init__(self, **_) -> None:
@@ -112,9 +109,7 @@ class ShoppingListMultiPurposeLabel(SqlAlchemyBase, BaseMixins):
         "MultiPurposeLabel", back_populates="shopping_lists_label_settings"
     )
     position: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
-
-    class Config:
-        exclude = {"label"}
+    model_config = ConfigDict(exclude={"label"})

     @auto_init()
     def __init__(self, **_) -> None:
@@ -146,9 +141,7 @@ class ShoppingList(SqlAlchemyBase, BaseMixins):
         collection_class=ordering_list("position"),
     )
     extras: Mapped[list[ShoppingListExtras]] = orm.relationship("ShoppingListExtras", cascade="all, delete-orphan")
-
-    class Config:
-        exclude = {"id", "list_items"}
+    model_config = ConfigDict(exclude={"id", "list_items"})

     @api_extras
     @auto_init()


@@ -1,3 +1,4 @@
+from pydantic import ConfigDict
 from sqlalchemy import ForeignKey, Integer, String, orm
 from sqlalchemy.orm import Mapped, mapped_column
@@ -28,12 +29,12 @@ class RecipeInstruction(SqlAlchemyBase):
     ingredient_references: Mapped[list[RecipeIngredientRefLink]] = orm.relationship(
         RecipeIngredientRefLink, cascade="all, delete-orphan"
     )
-
-    class Config:
-        exclude = {
+    model_config = ConfigDict(
+        exclude={
             "id",
             "ingredient_references",
         }
+    )

     @auto_init()
     def __init__(self, ingredient_references, session, **_) -> None:


@@ -3,6 +3,7 @@ from typing import TYPE_CHECKING

 import sqlalchemy as sa
 import sqlalchemy.orm as orm
+from pydantic import ConfigDict
 from sqlalchemy import event
 from sqlalchemy.ext.orderinglist import ordering_list
 from sqlalchemy.orm import Mapped, mapped_column, validates
@@ -134,10 +135,9 @@ class RecipeModel(SqlAlchemyBase, BaseMixins):
     # Automatically updated by sqlalchemy event, do not write to this manually
     name_normalized: Mapped[str] = mapped_column(sa.String, nullable=False, index=True)
     description_normalized: Mapped[str | None] = mapped_column(sa.String, index=True)
-
-    class Config:
-        get_attr = "slug"
-        exclude = {
+    model_config = ConfigDict(
+        get_attr="slug",
+        exclude={
             "assets",
             "notes",
             "nutrition",
@@ -146,7 +146,8 @@ class RecipeModel(SqlAlchemyBase, BaseMixins):
             "settings",
             "comments",
             "timeline_events",
-        }
+        },
+    )

     @validates("name")
     def validate_name(self, _, name):


@@ -2,6 +2,7 @@ import enum
 from datetime import datetime
 from typing import TYPE_CHECKING, Optional

+from pydantic import ConfigDict
 from sqlalchemy import Boolean, DateTime, Enum, ForeignKey, Integer, String, orm
 from sqlalchemy.ext.hybrid import hybrid_property
 from sqlalchemy.orm import Mapped, mapped_column
@@ -84,9 +85,8 @@ class User(SqlAlchemyBase, BaseMixins):
     favorite_recipes: Mapped[list["RecipeModel"]] = orm.relationship(
         "RecipeModel", secondary=users_to_favorites, back_populates="favorited_by"
     )
-
-    class Config:
-        exclude = {
+    model_config = ConfigDict(
+        exclude={
             "password",
             "admin",
             "can_manage",
@@ -94,6 +94,7 @@ class User(SqlAlchemyBase, BaseMixins):
             "can_organize",
             "group",
         }
+    )

     @hybrid_property
     def group_slug(self) -> str:


@@ -31,5 +31,14 @@
     "generic-updated-with-url": "{name} is opgedateer, {url}",
     "generic-duplicated": "{name} is gekopieer",
     "generic-deleted": "{name} is verwyder"
-  }
+  },
+  "datetime": {
+    "year": "year|years",
+    "day": "day|days",
+    "hour": "hour|hours",
+    "minute": "minute|minutes",
+    "second": "second|seconds",
+    "millisecond": "millisecond|milliseconds",
+    "microsecond": "microsecond|microseconds"
+  }
 }


@@ -31,5 +31,14 @@
     "generic-updated-with-url": "{name} تم تحديثه، {url}",
     "generic-duplicated": "تم تكرار {name}",
     "generic-deleted": "تم حذف {name}"
-  }
+  },
+  "datetime": {
+    "year": "year|years",
+    "day": "day|days",
+    "hour": "hour|hours",
+    "minute": "minute|minutes",
+    "second": "second|seconds",
+    "millisecond": "millisecond|milliseconds",
+    "microsecond": "microsecond|microseconds"
+  }
 }


@@ -31,5 +31,14 @@
     "generic-updated-with-url": "{name} беше актуализирано, {url}",
     "generic-duplicated": "{name} е дублицирано",
     "generic-deleted": "{name} беше изтрито"
-  }
+  },
+  "datetime": {
+    "year": "година|години",
+    "day": "ден|дни",
+    "hour": "час|часове",
+    "minute": "минута|минути",
+    "second": "секунда|секунди",
+    "millisecond": "милисекунда|милисекунди",
+    "microsecond": "микросекунда|микросекунди"
+  }
 }


@@ -31,5 +31,14 @@
     "generic-updated-with-url": "{name} ha estat actualitzat, {url}",
     "generic-duplicated": "S'ha duplicat {name}",
     "generic-deleted": "{name} ha estat eliminat"
-  }
+  },
+  "datetime": {
+    "year": "year|years",
+    "day": "day|days",
+    "hour": "hour|hours",
+    "minute": "minute|minutes",
+    "second": "second|seconds",
+    "millisecond": "millisecond|milliseconds",
+    "microsecond": "microsecond|microseconds"
+  }
 }


@@ -31,5 +31,14 @@
     "generic-updated-with-url": "{name} byl aktualizován, {url}",
     "generic-duplicated": "{name} byl duplikován",
     "generic-deleted": "{name} byl odstraněn"
-  }
+  },
+  "datetime": {
+    "year": "year|years",
+    "day": "day|days",
+    "hour": "hour|hours",
+    "minute": "minute|minutes",
+    "second": "second|seconds",
+    "millisecond": "millisecond|milliseconds",
+    "microsecond": "microsecond|microseconds"
+  }
 }


@@ -31,5 +31,14 @@
     "generic-updated-with-url": "{name} er blevet opdateret, {url}",
     "generic-duplicated": "{name} er blevet dublikeret",
     "generic-deleted": "{name} er blevet slettet"
-  }
+  },
+  "datetime": {
+    "year": "år|år",
+    "day": "dag|dage",
+    "hour": "time|timer",
+    "minute": "minut|minutter",
+    "second": "sekund|sekunder",
+    "millisecond": "millisekund|millisekunder",
+    "microsecond": "mikrosekund|mikrosekunder"
+  }
 }


@@ -31,5 +31,14 @@
     "generic-updated-with-url": "{name} wurde aktualisiert, {url}",
     "generic-duplicated": "{name} wurde dupliziert",
     "generic-deleted": "{name} wurde gelöscht"
-  }
+  },
+  "datetime": {
+    "year": "year|years",
+    "day": "day|days",
+    "hour": "hour|hours",
+    "minute": "minute|minutes",
+    "second": "second|seconds",
+    "millisecond": "millisecond|milliseconds",
+    "microsecond": "microsecond|microseconds"
+  }
 }


@@ -31,5 +31,14 @@
     "generic-updated-with-url": "{name} has been updated, {url}",
     "generic-duplicated": "{name} has been duplicated",
     "generic-deleted": "{name} has been deleted"
-  }
+  },
+  "datetime": {
+    "year": "year|years",
+    "day": "day|days",
+    "hour": "hour|hours",
+    "minute": "minute|minutes",
+    "second": "second|seconds",
+    "millisecond": "millisecond|milliseconds",
+    "microsecond": "microsecond|microseconds"
+  }
 }


@@ -31,5 +31,14 @@
     "generic-updated-with-url": "{name} has been updated, {url}",
     "generic-duplicated": "{name} has been duplicated",
     "generic-deleted": "{name} has been deleted"
-  }
+  },
+  "datetime": {
+    "year": "year|years",
+    "day": "day|days",
+    "hour": "hour|hours",
+    "minute": "minute|minutes",
+    "second": "second|seconds",
+    "millisecond": "millisecond|milliseconds",
+    "microsecond": "microsecond|microseconds"
+  }
 }


@@ -31,5 +31,14 @@
     "generic-updated-with-url": "{name} has been updated, {url}",
     "generic-duplicated": "{name} has been duplicated",
     "generic-deleted": "{name} has been deleted"
-  }
+  },
+  "datetime": {
+    "year": "year|years",
+    "day": "day|days",
+    "hour": "hour|hours",
+    "minute": "minute|minutes",
+    "second": "second|seconds",
+    "millisecond": "millisecond|milliseconds",
+    "microsecond": "microsecond|microseconds"
+  }
 }


@@ -31,5 +31,14 @@
     "generic-updated-with-url": "Se ha actualizado {name}, {url}",
     "generic-duplicated": "Se ha duplicado {name}",
     "generic-deleted": "Se ha eliminado {name}"
-  }
+  },
+  "datetime": {
+    "year": "year|years",
+    "day": "day|days",
+    "hour": "hour|hours",
+    "minute": "minute|minutes",
+    "second": "second|seconds",
+    "millisecond": "millisecond|milliseconds",
+    "microsecond": "microsecond|microseconds"
+  }
 }


@@ -31,5 +31,14 @@
     "generic-updated-with-url": "{name} on päivitetty, {url}",
     "generic-duplicated": "{name} on kahdennettu",
     "generic-deleted": "{name} on poistettu"
-  }
+  },
+  "datetime": {
+    "year": "year|years",
+    "day": "day|days",
+    "hour": "hour|hours",
+    "minute": "minute|minutes",
+    "second": "second|seconds",
+    "millisecond": "millisecond|milliseconds",
+    "microsecond": "microsecond|microseconds"
+  }
 }


@@ -31,5 +31,14 @@
     "generic-updated-with-url": "{name} a été mis à jour, {url}",
     "generic-duplicated": "{name} a été dupliqué",
     "generic-deleted": "{name} a été supprimé"
-  }
+  },
+  "datetime": {
+    "year": "year|years",
+    "day": "day|days",
+    "hour": "hour|hours",
+    "minute": "minute|minutes",
+    "second": "second|seconds",
+    "millisecond": "millisecond|milliseconds",
+    "microsecond": "microsecond|microseconds"
+  }
 }


@@ -31,5 +31,14 @@
     "generic-updated-with-url": "{name} a été mis à jour, {url}",
     "generic-duplicated": "{name} a été dupliqué",
     "generic-deleted": "{name} a été supprimé"
-  }
+  },
+  "datetime": {
+    "year": "année|années",
+    "day": "jour|jours",
+    "hour": "heure|heures",
+    "minute": "minute|minutes",
+    "second": "seconde|secondes",
+    "millisecond": "milliseconde|millisecondes",
+    "microsecond": "microseconde|microsecondes"
+  }
 }


@@ -31,5 +31,14 @@
     "generic-updated-with-url": "{name} has been updated, {url}",
     "generic-duplicated": "{name} has been duplicated",
     "generic-deleted": "{name} has been deleted"
-  }
+  },
+  "datetime": {
+    "year": "year|years",
+    "day": "day|days",
+    "hour": "hour|hours",
+    "minute": "minute|minutes",
+    "second": "second|seconds",
+    "millisecond": "millisecond|milliseconds",
+    "microsecond": "microsecond|microseconds"
+  }
 }


@@ -31,5 +31,14 @@
     "generic-updated-with-url": "{name} עודכן, {url}",
     "generic-duplicated": "{name} שוכפל",
     "generic-deleted": "{name} נמחק"
-  }
+  },
+  "datetime": {
+    "year": "year|years",
+    "day": "day|days",
+    "hour": "hour|hours",
+    "minute": "minute|minutes",
+    "second": "second|seconds",
+    "millisecond": "millisecond|milliseconds",
+    "microsecond": "microsecond|microseconds"
+  }
 }


@@ -31,5 +31,14 @@
     "generic-updated-with-url": "{name} je ažuriran, {url}",
     "generic-duplicated": "{name} je dupliciran",
     "generic-deleted": "{name} je obrisan"
-  }
+  },
+  "datetime": {
+    "year": "year|years",
+    "day": "day|days",
+    "hour": "hour|hours",
+    "minute": "minute|minutes",
+    "second": "second|seconds",
+    "millisecond": "millisecond|milliseconds",
+    "microsecond": "microsecond|microseconds"
+  }
 }


@@ -31,5 +31,14 @@
     "generic-updated-with-url": "{name} frissítve, {url}",
     "generic-duplicated": "{name} duplikálva",
     "generic-deleted": "{name} törölve lett"
-  }
+  },
+  "datetime": {
+    "year": "év",
+    "day": "nap",
+    "hour": "óra",
+    "minute": "perc",
+    "second": "másodperc",
+    "millisecond": "ezredmásodperc",
+    "microsecond": "mikroszekundum"
+  }
 }


@@ -31,5 +31,14 @@
     "generic-updated-with-url": "{name} has been updated, {url}",
     "generic-duplicated": "{name} has been duplicated",
     "generic-deleted": "{name} has been deleted"
-  }
+  },
+  "datetime": {
+    "year": "year|years",
+    "day": "day|days",
+    "hour": "hour|hours",
+    "minute": "minute|minutes",
+    "second": "second|seconds",
+    "millisecond": "millisecond|milliseconds",
+    "microsecond": "microsecond|microseconds"
+  }
 }


@@ -31,5 +31,14 @@
     "generic-updated-with-url": "{name} è stato aggiornato, {url}",
     "generic-duplicated": "{name} è stato duplicato",
     "generic-deleted": "{name} è stato eliminato"
-  }
+  },
+  "datetime": {
+    "year": "anno|anni",
+    "day": "giorno|giorni",
+    "hour": "ora|ore",
+    "minute": "minuto|minuti",
+    "second": "secondo|secondi",
+    "millisecond": "millisecondo|millisecondi",
+    "microsecond": "microsecondo|microsecondi"
+  }
 }


@@ -31,5 +31,14 @@
     "generic-updated-with-url": "{name} has been updated, {url}",
     "generic-duplicated": "{name} has been duplicated",
     "generic-deleted": "{name} has been deleted"
-  }
+  },
+  "datetime": {
+    "year": "year|years",
+    "day": "day|days",
+    "hour": "hour|hours",
+    "minute": "minute|minutes",
+    "second": "second|seconds",
+    "millisecond": "millisecond|milliseconds",
+    "microsecond": "microsecond|microseconds"
+  }
 }


@@ -31,5 +31,14 @@
     "generic-updated-with-url": "{name} has been updated, {url}",
     "generic-duplicated": "{name} has been duplicated",
     "generic-deleted": "{name} has been deleted"
-  }
+  },
+  "datetime": {
+    "year": "year|years",
+    "day": "day|days",
+    "hour": "hour|hours",
+    "minute": "minute|minutes",
+    "second": "second|seconds",
+    "millisecond": "millisecond|milliseconds",
+    "microsecond": "microsecond|microseconds"
+  }
 }


@@ -31,5 +31,14 @@
     "generic-updated-with-url": "{name} atnaujintas, {url}",
     "generic-duplicated": "{name} buvo nukopijuotas",
     "generic-deleted": "{name} ištrintas"
-  }
+  },
+  "datetime": {
+    "year": "year|years",
+    "day": "day|days",
+    "hour": "hour|hours",
+    "minute": "minute|minutes",
+    "second": "second|seconds",
+    "millisecond": "millisecond|milliseconds",
+    "microsecond": "microsecond|microseconds"
+  }
 }

View File

@ -31,5 +31,14 @@
"generic-updated-with-url": "{name} has been updated, {url}", "generic-updated-with-url": "{name} has been updated, {url}",
"generic-duplicated": "{name} has been duplicated", "generic-duplicated": "{name} has been duplicated",
"generic-deleted": "{name} has been deleted" "generic-deleted": "{name} has been deleted"
},
"datetime": {
"year": "year|years",
"day": "day|days",
"hour": "hour|hours",
"minute": "minute|minutes",
"second": "second|seconds",
"millisecond": "millisecond|milliseconds",
"microsecond": "microsecond|microseconds"
} }
} }

View File

@ -31,5 +31,14 @@
"generic-updated-with-url": "{name} is bijgewerkt, {url}", "generic-updated-with-url": "{name} is bijgewerkt, {url}",
"generic-duplicated": "(naam) is gekopieerd", "generic-duplicated": "(naam) is gekopieerd",
"generic-deleted": "{name} is verwijderd" "generic-deleted": "{name} is verwijderd"
},
"datetime": {
"year": "jaar|jaren",
"day": "dag|dagen",
"hour": "uur|uren",
"minute": "minuut|minuten",
"second": "seconde|seconden",
"millisecond": "milliseconde milliseconden",
"microsecond": "microseconde microseconden"
} }
} }

View File

@ -31,5 +31,14 @@
"generic-updated-with-url": "{name} har blitt oppdatert, {url}", "generic-updated-with-url": "{name} har blitt oppdatert, {url}",
"generic-duplicated": "{name} har blitt duplisert", "generic-duplicated": "{name} har blitt duplisert",
"generic-deleted": "{name} har blitt slettet" "generic-deleted": "{name} har blitt slettet"
},
"datetime": {
"year": "year|years",
"day": "day|days",
"hour": "hour|hours",
"minute": "minute|minutes",
"second": "second|seconds",
"millisecond": "millisecond|milliseconds",
"microsecond": "microsecond|microseconds"
} }
} }

View File

@ -31,5 +31,14 @@
"generic-updated-with-url": "{name} został zaktualizowany. {url}", "generic-updated-with-url": "{name} został zaktualizowany. {url}",
"generic-duplicated": "{name} został zduplikowany", "generic-duplicated": "{name} został zduplikowany",
"generic-deleted": "{name} został usunięty" "generic-deleted": "{name} został usunięty"
},
"datetime": {
"year": "year|years",
"day": "day|days",
"hour": "hour|hours",
"minute": "minute|minutes",
"second": "second|seconds",
"millisecond": "millisecond|milliseconds",
"microsecond": "microsecond|microseconds"
} }
} }

View File

@ -31,5 +31,14 @@
"generic-updated-with-url": "{name} foi atualizado, {url}", "generic-updated-with-url": "{name} foi atualizado, {url}",
"generic-duplicated": "{name} foi duplicada", "generic-duplicated": "{name} foi duplicada",
"generic-deleted": "{name} foi excluído" "generic-deleted": "{name} foi excluído"
},
"datetime": {
"year": "year|years",
"day": "day|days",
"hour": "hour|hours",
"minute": "minute|minutes",
"second": "second|seconds",
"millisecond": "millisecond|milliseconds",
"microsecond": "microsecond|microseconds"
} }
} }

View File

@ -31,5 +31,14 @@
"generic-updated-with-url": "{name} foi atualizado, {url}", "generic-updated-with-url": "{name} foi atualizado, {url}",
"generic-duplicated": "{name} foi duplicado", "generic-duplicated": "{name} foi duplicado",
"generic-deleted": "{name} foi removido" "generic-deleted": "{name} foi removido"
},
"datetime": {
"year": "ano|anos",
"day": "dia|dias",
"hour": "hora|horas",
"minute": "minuto|minutos",
"second": "segundo|segundos",
"millisecond": "milissegundo|milissegundos",
"microsecond": "microssegundo|microssegundos"
} }
} }

View File

@ -31,5 +31,14 @@
"generic-updated-with-url": "{name} a fost actualizat, {url}", "generic-updated-with-url": "{name} a fost actualizat, {url}",
"generic-duplicated": "{name} a fost duplicat", "generic-duplicated": "{name} a fost duplicat",
"generic-deleted": "{name} a fost șters" "generic-deleted": "{name} a fost șters"
},
"datetime": {
"year": "year|years",
"day": "day|days",
"hour": "hour|hours",
"minute": "minute|minutes",
"second": "second|seconds",
"millisecond": "millisecond|milliseconds",
"microsecond": "microsecond|microseconds"
} }
} }

View File

@ -31,5 +31,14 @@
"generic-updated-with-url": "{name} был обновлен, {url}", "generic-updated-with-url": "{name} был обновлен, {url}",
"generic-duplicated": "Копия {name} была создана", "generic-duplicated": "Копия {name} была создана",
"generic-deleted": "{name} был удален" "generic-deleted": "{name} был удален"
},
"datetime": {
"year": "year|years",
"day": "day|days",
"hour": "hour|hours",
"minute": "minute|minutes",
"second": "second|seconds",
"millisecond": "millisecond|milliseconds",
"microsecond": "microsecond|microseconds"
} }
} }

View File

@ -31,5 +31,14 @@
"generic-updated-with-url": "{name} bol aktualizovaný, {url}", "generic-updated-with-url": "{name} bol aktualizovaný, {url}",
"generic-duplicated": "{name} bol duplikovaný", "generic-duplicated": "{name} bol duplikovaný",
"generic-deleted": "{name} bol vymazaný" "generic-deleted": "{name} bol vymazaný"
},
"datetime": {
"year": "year|years",
"day": "day|days",
"hour": "hour|hours",
"minute": "minute|minutes",
"second": "second|seconds",
"millisecond": "millisecond|milliseconds",
"microsecond": "microsecond|microseconds"
} }
} }

View File

@ -31,5 +31,14 @@
"generic-updated-with-url": "{name} je bil posodobljen, {url}", "generic-updated-with-url": "{name} je bil posodobljen, {url}",
"generic-duplicated": "{name} je bilo podvojeno", "generic-duplicated": "{name} je bilo podvojeno",
"generic-deleted": "{name} je bil izbrisan" "generic-deleted": "{name} je bil izbrisan"
},
"datetime": {
"year": "year|years",
"day": "day|days",
"hour": "hour|hours",
"minute": "minute|minutes",
"second": "second|seconds",
"millisecond": "millisecond|milliseconds",
"microsecond": "microsecond|microseconds"
} }
} }

View File

@ -31,5 +31,14 @@
"generic-updated-with-url": "{name} је ажурирано, {url}", "generic-updated-with-url": "{name} је ажурирано, {url}",
"generic-duplicated": "{name} је дуплиран", "generic-duplicated": "{name} је дуплиран",
"generic-deleted": "{name} је обрисан" "generic-deleted": "{name} је обрисан"
},
"datetime": {
"year": "year|years",
"day": "day|days",
"hour": "hour|hours",
"minute": "minute|minutes",
"second": "second|seconds",
"millisecond": "millisecond|milliseconds",
"microsecond": "microsecond|microseconds"
} }
} }

View File

@ -31,5 +31,14 @@
"generic-updated-with-url": "{name} har uppdaterats, {url}", "generic-updated-with-url": "{name} har uppdaterats, {url}",
"generic-duplicated": "{name} har duplicerats", "generic-duplicated": "{name} har duplicerats",
"generic-deleted": "{name} har tagits bort" "generic-deleted": "{name} har tagits bort"
},
"datetime": {
"year": "år|år",
"day": "dag|dagar",
"hour": "timme|timmar",
"minute": "minut|minuter",
"second": "sekund|sekunder",
"millisecond": "millisecond|milliseconds",
"microsecond": "microsecond|microseconds"
} }
} }

View File

@ -31,5 +31,14 @@
"generic-updated-with-url": "{name} güncellendi, {url}", "generic-updated-with-url": "{name} güncellendi, {url}",
"generic-duplicated": "{name} yinelendi", "generic-duplicated": "{name} yinelendi",
"generic-deleted": "{name} silindi" "generic-deleted": "{name} silindi"
},
"datetime": {
"year": "yıl|yıllar",
"day": "gün|günler",
"hour": "saat|saatler",
"minute": "dakika|dakikalar",
"second": "saniye|saniyeler",
"millisecond": "milisaniye|milisaniyeler",
"microsecond": "mikrosaniye|mikrosaniyeler"
} }
} }

View File

@ -31,5 +31,14 @@
"generic-updated-with-url": "{name} оновлено, {url}", "generic-updated-with-url": "{name} оновлено, {url}",
"generic-duplicated": "{name} дубльовано", "generic-duplicated": "{name} дубльовано",
"generic-deleted": "{name} видалено" "generic-deleted": "{name} видалено"
},
"datetime": {
"year": "рік|роки",
"day": "день|дні",
"hour": "година|години",
"minute": "хвилина|хвилини",
"second": "секунда|секунди",
"millisecond": "мілісекунда|мілісекунди",
"microsecond": "мікросекунда|мікросекунди"
} }
} }

View File

@ -31,5 +31,14 @@
"generic-updated-with-url": "{name} has been updated, {url}", "generic-updated-with-url": "{name} has been updated, {url}",
"generic-duplicated": "{name} has been duplicated", "generic-duplicated": "{name} has been duplicated",
"generic-deleted": "{name} has been deleted" "generic-deleted": "{name} has been deleted"
},
"datetime": {
"year": "year|years",
"day": "day|days",
"hour": "hour|hours",
"minute": "minute|minutes",
"second": "second|seconds",
"millisecond": "millisecond|milliseconds",
"microsecond": "microsecond|microseconds"
} }
} }

View File

@ -31,5 +31,14 @@
"generic-updated-with-url": "{name} 已创建, {url}", "generic-updated-with-url": "{name} 已创建, {url}",
"generic-duplicated": "{name} 已复制", "generic-duplicated": "{name} 已复制",
"generic-deleted": "{name} 已删除" "generic-deleted": "{name} 已删除"
},
"datetime": {
"year": "year|years",
"day": "day|days",
"hour": "hour|hours",
"minute": "minute|minutes",
"second": "second|seconds",
"millisecond": "millisecond|milliseconds",
"microsecond": "microsecond|microseconds"
} }
} }

View File

@ -31,5 +31,14 @@
"generic-updated-with-url": "{name} has been updated, {url}", "generic-updated-with-url": "{name} has been updated, {url}",
"generic-duplicated": "{name} has been duplicated", "generic-duplicated": "{name} has been duplicated",
"generic-deleted": "{name} has been deleted" "generic-deleted": "{name} has been deleted"
},
"datetime": {
"year": "year|years",
"day": "day|days",
"hour": "hour|hours",
"minute": "minute|minutes",
"second": "second|seconds",
"millisecond": "millisecond|milliseconds",
"microsecond": "microsecond|microseconds"
} }
} }

View File

@ -1,6 +1,7 @@
import json import json
from dataclasses import dataclass from dataclasses import dataclass
from pathlib import Path from pathlib import Path
from typing import cast
@dataclass(slots=True) @dataclass(slots=True)
@ -13,6 +14,22 @@ class JsonProvider:
else: else:
self.translations = path self.translations = path
def _parse_plurals(self, value: str, count: float):
# based off of: https://kazupon.github.io/vue-i18n/guide/pluralization.html
values = [v.strip() for v in value.split("|")]
if len(values) == 1:
return value
elif len(values) == 2:
return values[0] if count == 1 else values[1]
elif len(values) == 3:
if count == 0:
return values[0]
else:
return values[1] if count == 1 else values[2]
else:
return values[0]
def t(self, key: str, default=None, **kwargs) -> str: def t(self, key: str, default=None, **kwargs) -> str:
keys = key.split(".") keys = key.split(".")
@ -30,9 +47,12 @@ class JsonProvider:
if i == last: if i == last:
for key, value in kwargs.items(): for key, value in kwargs.items():
if not value: translation_value = cast(str, translation_value)
if value is None:
value = "" value = ""
translation_value = translation_value.replace("{" + key + "}", value) if key == "count":
return translation_value translation_value = self._parse_plurals(translation_value, float(value))
translation_value = translation_value.replace("{" + key + "}", str(value)) # type: ignore
return translation_value # type: ignore
return default or key return default or key
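The new _parse_plurals helper follows the vue-i18n pluralization convention used by the locale entries above: a single form is returned untouched, two forms mean singular|plural, and three forms mean zero|one|many. A minimal standalone sketch of the same selection rules (the pick_plural name is illustrative only, not part of the change):

def pick_plural(value: str, count: float) -> str:
    # "day|days" -> singular/plural; "no days|day|days" -> zero/one/many
    forms = [v.strip() for v in value.split("|")]
    if len(forms) == 1:
        return value
    if len(forms) == 2:
        return forms[0] if count == 1 else forms[1]
    if len(forms) == 3:
        if count == 0:
            return forms[0]
        return forms[1] if count == 1 else forms[2]
    return forms[0]

assert pick_plural("day|days", 1) == "day"
assert pick_plural("day|days", 3) == "days"
assert pick_plural("no days|day|days", 0) == "no days"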

View File

@ -106,7 +106,7 @@ class RepositoryGeneric(Generic[Schema, Model]):
except AttributeError: except AttributeError:
self.logger.info(f'Attempted to sort by unknown sort property "{order_by}"; ignoring') self.logger.info(f'Attempted to sort by unknown sort property "{order_by}"; ignoring')
result = self.session.execute(q.offset(start).limit(limit)).unique().scalars().all() result = self.session.execute(q.offset(start).limit(limit)).unique().scalars().all()
return [eff_schema.from_orm(x) for x in result] return [eff_schema.model_validate(x) for x in result]
def multi_query( def multi_query(
self, self,
@ -129,7 +129,7 @@ class RepositoryGeneric(Generic[Schema, Model]):
q = q.offset(start).limit(limit) q = q.offset(start).limit(limit)
result = self.session.execute(q).unique().scalars().all() result = self.session.execute(q).unique().scalars().all()
return [eff_schema.from_orm(x) for x in result] return [eff_schema.model_validate(x) for x in result]
def _query_one(self, match_value: str | int | UUID4, match_key: str | None = None) -> Model: def _query_one(self, match_value: str | int | UUID4, match_key: str | None = None) -> Model:
""" """
@ -161,11 +161,11 @@ class RepositoryGeneric(Generic[Schema, Model]):
if not result: if not result:
return None return None
return eff_schema.from_orm(result) return eff_schema.model_validate(result)
def create(self, data: Schema | BaseModel | dict) -> Schema: def create(self, data: Schema | BaseModel | dict) -> Schema:
try: try:
data = data if isinstance(data, dict) else data.dict() data = data if isinstance(data, dict) else data.model_dump()
new_document = self.model(session=self.session, **data) new_document = self.model(session=self.session, **data)
self.session.add(new_document) self.session.add(new_document)
self.session.commit() self.session.commit()
@ -175,12 +175,12 @@ class RepositoryGeneric(Generic[Schema, Model]):
self.session.refresh(new_document) self.session.refresh(new_document)
return self.schema.from_orm(new_document) return self.schema.model_validate(new_document)
def create_many(self, data: Iterable[Schema | dict]) -> list[Schema]: def create_many(self, data: Iterable[Schema | dict]) -> list[Schema]:
new_documents = [] new_documents = []
for document in data: for document in data:
document = document if isinstance(document, dict) else document.dict() document = document if isinstance(document, dict) else document.model_dump()
new_document = self.model(session=self.session, **document) new_document = self.model(session=self.session, **document)
new_documents.append(new_document) new_documents.append(new_document)
@ -190,7 +190,7 @@ class RepositoryGeneric(Generic[Schema, Model]):
for created_document in new_documents: for created_document in new_documents:
self.session.refresh(created_document) self.session.refresh(created_document)
return [self.schema.from_orm(x) for x in new_documents] return [self.schema.model_validate(x) for x in new_documents]
def update(self, match_value: str | int | UUID4, new_data: dict | BaseModel) -> Schema: def update(self, match_value: str | int | UUID4, new_data: dict | BaseModel) -> Schema:
"""Update a database entry. """Update a database entry.
@ -202,18 +202,18 @@ class RepositoryGeneric(Generic[Schema, Model]):
Returns: Returns:
dict: Returns a dictionary representation of the database entry dict: Returns a dictionary representation of the database entry
""" """
new_data = new_data if isinstance(new_data, dict) else new_data.dict() new_data = new_data if isinstance(new_data, dict) else new_data.model_dump()
entry = self._query_one(match_value=match_value) entry = self._query_one(match_value=match_value)
entry.update(session=self.session, **new_data) entry.update(session=self.session, **new_data)
self.session.commit() self.session.commit()
return self.schema.from_orm(entry) return self.schema.model_validate(entry)
def update_many(self, data: Iterable[Schema | dict]) -> list[Schema]: def update_many(self, data: Iterable[Schema | dict]) -> list[Schema]:
document_data_by_id: dict[str, dict] = {} document_data_by_id: dict[str, dict] = {}
for document in data: for document in data:
document_data = document if isinstance(document, dict) else document.dict() document_data = document if isinstance(document, dict) else document.model_dump()
document_data_by_id[document_data["id"]] = document_data document_data_by_id[document_data["id"]] = document_data
documents_to_update_query = self._query().filter(self.model.id.in_(list(document_data_by_id.keys()))) documents_to_update_query = self._query().filter(self.model.id.in_(list(document_data_by_id.keys())))
@ -226,14 +226,14 @@ class RepositoryGeneric(Generic[Schema, Model]):
updated_documents.append(document_to_update) updated_documents.append(document_to_update)
self.session.commit() self.session.commit()
return [self.schema.from_orm(x) for x in updated_documents] return [self.schema.model_validate(x) for x in updated_documents]
def patch(self, match_value: str | int | UUID4, new_data: dict | BaseModel) -> Schema: def patch(self, match_value: str | int | UUID4, new_data: dict | BaseModel) -> Schema:
new_data = new_data if isinstance(new_data, dict) else new_data.dict() new_data = new_data if isinstance(new_data, dict) else new_data.model_dump()
entry = self._query_one(match_value=match_value) entry = self._query_one(match_value=match_value)
entry_as_dict = self.schema.from_orm(entry).dict() entry_as_dict = self.schema.model_validate(entry).model_dump()
entry_as_dict.update(new_data) entry_as_dict.update(new_data)
return self.update(match_value, entry_as_dict) return self.update(match_value, entry_as_dict)
@ -242,7 +242,7 @@ class RepositoryGeneric(Generic[Schema, Model]):
match_key = match_key or self.primary_key match_key = match_key or self.primary_key
result = self._query_one(value, match_key) result = self._query_one(value, match_key)
results_as_model = self.schema.from_orm(result) results_as_model = self.schema.model_validate(result)
try: try:
self.session.delete(result) self.session.delete(result)
@ -256,7 +256,7 @@ class RepositoryGeneric(Generic[Schema, Model]):
def delete_many(self, values: Iterable) -> Schema: def delete_many(self, values: Iterable) -> Schema:
query = self._query().filter(self.model.id.in_(values)) # type: ignore query = self._query().filter(self.model.id.in_(values)) # type: ignore
results = self.session.execute(query).unique().scalars().all() results = self.session.execute(query).unique().scalars().all()
results_as_model = [self.schema.from_orm(result) for result in results] results_as_model = [self.schema.model_validate(result) for result in results]
try: try:
# we create a delete statement for each row # we create a delete statement for each row
@ -295,7 +295,7 @@ class RepositoryGeneric(Generic[Schema, Model]):
return self.session.scalar(q) return self.session.scalar(q)
else: else:
q = self._query(override_schema=eff_schema).filter(attribute_name == attr_match) q = self._query(override_schema=eff_schema).filter(attribute_name == attr_match)
return [eff_schema.from_orm(x) for x in self.session.execute(q).scalars().all()] return [eff_schema.model_validate(x) for x in self.session.execute(q).scalars().all()]
def page_all(self, pagination: PaginationQuery, override=None, search: str | None = None) -> PaginationBase[Schema]: def page_all(self, pagination: PaginationQuery, override=None, search: str | None = None) -> PaginationBase[Schema]:
""" """
@ -309,7 +309,7 @@ class RepositoryGeneric(Generic[Schema, Model]):
""" """
eff_schema = override or self.schema eff_schema = override or self.schema
# Copy this, because calling methods (e.g. tests) might rely on it not getting mutated # Copy this, because calling methods (e.g. tests) might rely on it not getting mutated
pagination_result = pagination.copy() pagination_result = pagination.model_copy()
q = self._query(override_schema=eff_schema, with_options=False) q = self._query(override_schema=eff_schema, with_options=False)
fltr = self._filter_builder() fltr = self._filter_builder()
@ -336,7 +336,7 @@ class RepositoryGeneric(Generic[Schema, Model]):
per_page=pagination_result.per_page, per_page=pagination_result.per_page,
total=count, total=count,
total_pages=total_pages, total_pages=total_pages,
items=[eff_schema.from_orm(s) for s in data], items=[eff_schema.model_validate(s) for s in data],
) )
def add_pagination_to_query(self, query: Select, pagination: PaginationQuery) -> tuple[Select, int, int]: def add_pagination_to_query(self, query: Select, pagination: PaginationQuery) -> tuple[Select, int, int]:
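The repository changes above are mechanical Pydantic v1 to v2 renames: from_orm becomes model_validate, dict becomes model_dump, copy becomes model_copy, json becomes model_dump_json, and construct becomes model_construct. A minimal sketch of the mapping, assuming a plain Pydantic v2 model (RecipeSummaryExample and the _Row stand-in are illustrative, not part of the change):

from pydantic import BaseModel, ConfigDict

class RecipeSummaryExample(BaseModel):
    # replaces `class Config: orm_mode = True`
    model_config = ConfigDict(from_attributes=True)
    name: str

class _Row:
    # stand-in for a SQLAlchemy row exposing matching attributes
    name = "Pasta"

summary = RecipeSummaryExample.model_validate(_Row())       # was .from_orm(...)
payload = summary.model_dump()                               # was .dict()
partial = summary.model_dump(exclude_unset=True)             # same keyword arguments still apply
copied = summary.model_copy()                                # was .copy()
as_json = summary.model_dump_json()                          # was .json()
built = RecipeSummaryExample.model_construct(name="Stew")    # was .construct(...); skips validation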

View File

@ -23,7 +23,7 @@ from .repository_generic import RepositoryGeneric
class RepositoryGroup(RepositoryGeneric[GroupInDB, Group]): class RepositoryGroup(RepositoryGeneric[GroupInDB, Group]):
def create(self, data: GroupBase | dict) -> GroupInDB: def create(self, data: GroupBase | dict) -> GroupInDB:
if isinstance(data, GroupBase): if isinstance(data, GroupBase):
data = data.dict() data = data.model_dump()
max_attempts = 10 max_attempts = 10
original_name = cast(str, data["name"]) original_name = cast(str, data["name"])
@ -61,7 +61,7 @@ class RepositoryGroup(RepositoryGeneric[GroupInDB, Group]):
dbgroup = self.session.execute(select(self.model).filter_by(name=name)).scalars().one_or_none() dbgroup = self.session.execute(select(self.model).filter_by(name=name)).scalars().one_or_none()
if dbgroup is None: if dbgroup is None:
return None return None
return self.schema.from_orm(dbgroup) return self.schema.model_validate(dbgroup)
def get_by_slug_or_id(self, slug_or_id: str | UUID) -> GroupInDB | None: def get_by_slug_or_id(self, slug_or_id: str | UUID) -> GroupInDB | None:
if isinstance(slug_or_id, str): if isinstance(slug_or_id, str):

View File

@ -28,4 +28,4 @@ class RepositoryMealPlanRules(RepositoryGeneric[PlanRulesOut, GroupMealPlanRules
rules = self.session.execute(stmt).scalars().all() rules = self.session.execute(stmt).scalars().all()
return [self.schema.from_orm(x) for x in rules] return [self.schema.model_validate(x) for x in rules]

View File

@ -17,4 +17,4 @@ class RepositoryMeals(RepositoryGeneric[ReadPlanEntry, GroupMealPlan]):
today = date.today() today = date.today()
stmt = select(GroupMealPlan).filter(GroupMealPlan.date == today, GroupMealPlan.group_id == group_id) stmt = select(GroupMealPlan).filter(GroupMealPlan.date == today, GroupMealPlan.group_id == group_id)
plans = self.session.execute(stmt).scalars().all() plans = self.session.execute(stmt).scalars().all()
return [self.schema.from_orm(x) for x in plans] return [self.schema.model_validate(x) for x in plans]

View File

@ -58,7 +58,7 @@ class RepositoryRecipes(RepositoryGeneric[Recipe, RecipeModel]):
.offset(start) .offset(start)
.limit(limit) .limit(limit)
) )
return [eff_schema.from_orm(x) for x in self.session.execute(stmt).scalars().all()] return [eff_schema.model_validate(x) for x in self.session.execute(stmt).scalars().all()]
stmt = ( stmt = (
select(self.model) select(self.model)
@ -67,7 +67,7 @@ class RepositoryRecipes(RepositoryGeneric[Recipe, RecipeModel]):
.offset(start) .offset(start)
.limit(limit) .limit(limit)
) )
return [eff_schema.from_orm(x) for x in self.session.execute(stmt).scalars().all()] return [eff_schema.model_validate(x) for x in self.session.execute(stmt).scalars().all()]
def update_image(self, slug: str, _: str | None = None) -> int: def update_image(self, slug: str, _: str | None = None) -> int:
entry: RecipeModel = self._query_one(match_value=slug) entry: RecipeModel = self._query_one(match_value=slug)
@ -160,7 +160,7 @@ class RepositoryRecipes(RepositoryGeneric[Recipe, RecipeModel]):
search: str | None = None, search: str | None = None,
) -> RecipePagination: ) -> RecipePagination:
# Copy this, because calling methods (e.g. tests) might rely on it not getting mutated # Copy this, because calling methods (e.g. tests) might rely on it not getting mutated
pagination_result = pagination.copy() pagination_result = pagination.model_copy()
q = select(self.model) q = select(self.model)
args = [ args = [
@ -216,7 +216,7 @@ class RepositoryRecipes(RepositoryGeneric[Recipe, RecipeModel]):
self.session.rollback() self.session.rollback()
raise e raise e
items = [RecipeSummary.from_orm(item) for item in data] items = [RecipeSummary.model_validate(item) for item in data]
return RecipePagination( return RecipePagination(
page=pagination_result.page, page=pagination_result.page,
per_page=pagination_result.per_page, per_page=pagination_result.per_page,
@ -236,7 +236,7 @@ class RepositoryRecipes(RepositoryGeneric[Recipe, RecipeModel]):
.join(RecipeModel.recipe_category) .join(RecipeModel.recipe_category)
.filter(RecipeModel.recipe_category.any(Category.id.in_(ids))) .filter(RecipeModel.recipe_category.any(Category.id.in_(ids)))
) )
return [RecipeSummary.from_orm(x) for x in self.session.execute(stmt).unique().scalars().all()] return [RecipeSummary.model_validate(x) for x in self.session.execute(stmt).unique().scalars().all()]
def _build_recipe_filter( def _build_recipe_filter(
self, self,
@ -298,7 +298,7 @@ class RepositoryRecipes(RepositoryGeneric[Recipe, RecipeModel]):
require_all_tools=require_all_tools, require_all_tools=require_all_tools,
) )
stmt = select(RecipeModel).filter(*fltr) stmt = select(RecipeModel).filter(*fltr)
return [self.schema.from_orm(x) for x in self.session.execute(stmt).scalars().all()] return [self.schema.model_validate(x) for x in self.session.execute(stmt).scalars().all()]
def get_random_by_categories_and_tags( def get_random_by_categories_and_tags(
self, categories: list[RecipeCategory], tags: list[RecipeTag] self, categories: list[RecipeCategory], tags: list[RecipeTag]
@ -316,7 +316,7 @@ class RepositoryRecipes(RepositoryGeneric[Recipe, RecipeModel]):
stmt = ( stmt = (
select(RecipeModel).filter(and_(*filters)).order_by(func.random()).limit(1) # Postgres and SQLite specific select(RecipeModel).filter(and_(*filters)).order_by(func.random()).limit(1) # Postgres and SQLite specific
) )
return [self.schema.from_orm(x) for x in self.session.execute(stmt).scalars().all()] return [self.schema.model_validate(x) for x in self.session.execute(stmt).scalars().all()]
def get_random(self, limit=1) -> list[Recipe]: def get_random(self, limit=1) -> list[Recipe]:
stmt = ( stmt = (
@ -325,14 +325,14 @@ class RepositoryRecipes(RepositoryGeneric[Recipe, RecipeModel]):
.order_by(func.random()) # Postgres and SQLite specific .order_by(func.random()) # Postgres and SQLite specific
.limit(limit) .limit(limit)
) )
return [self.schema.from_orm(x) for x in self.session.execute(stmt).scalars().all()] return [self.schema.model_validate(x) for x in self.session.execute(stmt).scalars().all()]
def get_by_slug(self, group_id: UUID4, slug: str, limit=1) -> Recipe | None: def get_by_slug(self, group_id: UUID4, slug: str, limit=1) -> Recipe | None:
stmt = select(RecipeModel).filter(RecipeModel.group_id == group_id, RecipeModel.slug == slug) stmt = select(RecipeModel).filter(RecipeModel.group_id == group_id, RecipeModel.slug == slug)
dbrecipe = self.session.execute(stmt).scalars().one_or_none() dbrecipe = self.session.execute(stmt).scalars().one_or_none()
if dbrecipe is None: if dbrecipe is None:
return None return None
return self.schema.from_orm(dbrecipe) return self.schema.model_validate(dbrecipe)
def all_ids(self, group_id: UUID4) -> Sequence[UUID4]: def all_ids(self, group_id: UUID4) -> Sequence[UUID4]:
stmt = select(RecipeModel.id).filter(RecipeModel.group_id == group_id) stmt = select(RecipeModel.id).filter(RecipeModel.group_id == group_id)

View File

@ -18,7 +18,7 @@ class RepositoryUsers(RepositoryGeneric[PrivateUser, User]):
def update_password(self, id, password: str): def update_password(self, id, password: str):
entry = self._query_one(match_value=id) entry = self._query_one(match_value=id)
if settings.IS_DEMO: if settings.IS_DEMO:
user_to_update = self.schema.from_orm(entry) user_to_update = self.schema.model_validate(entry)
if user_to_update.is_default_user: if user_to_update.is_default_user:
# do not update the default user in demo mode # do not update the default user in demo mode
return user_to_update return user_to_update
@ -26,7 +26,7 @@ class RepositoryUsers(RepositoryGeneric[PrivateUser, User]):
entry.update_password(password) entry.update_password(password)
self.session.commit() self.session.commit()
return self.schema.from_orm(entry) return self.schema.model_validate(entry)
def create(self, user: PrivateUser | dict): # type: ignore def create(self, user: PrivateUser | dict): # type: ignore
new_user = super().create(user) new_user = super().create(user)
@ -66,9 +66,9 @@ class RepositoryUsers(RepositoryGeneric[PrivateUser, User]):
def get_by_username(self, username: str) -> PrivateUser | None: def get_by_username(self, username: str) -> PrivateUser | None:
stmt = select(User).filter(User.username == username) stmt = select(User).filter(User.username == username)
dbuser = self.session.execute(stmt).scalars().one_or_none() dbuser = self.session.execute(stmt).scalars().one_or_none()
return None if dbuser is None else self.schema.from_orm(dbuser) return None if dbuser is None else self.schema.model_validate(dbuser)
def get_locked_users(self) -> list[PrivateUser]: def get_locked_users(self) -> list[PrivateUser]:
stmt = select(User).filter(User.locked_at != None) # noqa E711 stmt = select(User).filter(User.locked_at != None) # noqa E711
results = self.session.execute(stmt).scalars().all() results = self.session.execute(stmt).scalars().all()
return [self.schema.from_orm(x) for x in results] return [self.schema.model_validate(x) for x in results]

View File

@ -2,7 +2,7 @@ from abc import ABC
from logging import Logger from logging import Logger
from fastapi import Depends from fastapi import Depends
from pydantic import UUID4 from pydantic import UUID4, ConfigDict
from sqlalchemy.orm import Session from sqlalchemy.orm import Session
from mealie.core.config import get_app_dirs, get_app_settings from mealie.core.config import get_app_dirs, get_app_settings
@ -25,10 +25,10 @@ class _BaseController(ABC):
session: Session = Depends(generate_session) session: Session = Depends(generate_session)
translator: Translator = Depends(local_provider) translator: Translator = Depends(local_provider)
_repos: AllRepositories | None _repos: AllRepositories | None = None
_logger: Logger | None _logger: Logger | None = None
_settings: AppSettings | None _settings: AppSettings | None = None
_folders: AppDirectories | None _folders: AppDirectories | None = None
@property @property
def t(self): def t(self):
@ -58,8 +58,7 @@ class _BaseController(ABC):
self._folders = get_app_dirs() self._folders = get_app_dirs()
return self._folders return self._folders
class Config: model_config = ConfigDict(arbitrary_types_allowed=True)
arbitrary_types_allowed = True
class BasePublicController(_BaseController): class BasePublicController(_BaseController):

View File

@ -6,11 +6,10 @@ See their repository for details -> https://github.com/dmontagu/fastapi-utils
import inspect import inspect
from collections.abc import Callable from collections.abc import Callable
from typing import Any, TypeVar, cast, get_type_hints from typing import Any, ClassVar, ForwardRef, TypeVar, cast, get_origin, get_type_hints
from fastapi import APIRouter, Depends from fastapi import APIRouter, Depends
from fastapi.routing import APIRoute from fastapi.routing import APIRoute
from pydantic.typing import is_classvar
from starlette.routing import Route, WebSocketRoute from starlette.routing import Route, WebSocketRoute
T = TypeVar("T") T = TypeVar("T")
@ -47,6 +46,25 @@ def _cbv(router: APIRouter, cls: type[T], *urls: str, instance: Any | None = Non
return cls return cls
# copied from Pydantic V1 Source: https://github.com/pydantic/pydantic/blob/1c91c8627b541b22354b9ed56b9ef1bb21ac6fbd/pydantic/v1/typing.py
def _check_classvar(v: type[Any] | None) -> bool:
if v is None:
return False
return v.__class__ == ClassVar.__class__ and getattr(v, "_name", None) == "ClassVar"
# copied from Pydantic V1 Source: https://github.com/pydantic/pydantic/blob/1c91c8627b541b22354b9ed56b9ef1bb21ac6fbd/pydantic/v1/typing.py
def _is_classvar(ann_type: type[Any]) -> bool:
if _check_classvar(ann_type) or _check_classvar(get_origin(ann_type)):
return True
if ann_type.__class__ == ForwardRef and ann_type.__forward_arg__.startswith("ClassVar["): # type: ignore
return True
return False
def _init_cbv(cls: type[Any], instance: Any | None = None) -> None: def _init_cbv(cls: type[Any], instance: Any | None = None) -> None:
""" """
Idempotently modifies the provided `cls`, performing the following modifications: Idempotently modifies the provided `cls`, performing the following modifications:
@ -67,7 +85,7 @@ def _init_cbv(cls: type[Any], instance: Any | None = None) -> None:
dependency_names: list[str] = [] dependency_names: list[str] = []
for name, hint in get_type_hints(cls).items(): for name, hint in get_type_hints(cls).items():
if is_classvar(hint): if _is_classvar(hint):
continue continue
if name.startswith("_"): if name.startswith("_"):

View File

@ -108,7 +108,7 @@ class HttpRepo(Generic[C, R, U]):
) )
try: try:
item = self.repo.patch(item_id, data.dict(exclude_unset=True, exclude_defaults=True)) item = self.repo.patch(item_id, data.model_dump(exclude_unset=True, exclude_defaults=True))
except Exception as ex: except Exception as ex:
self.handle_exception(ex) self.handle_exception(ex)

View File

@ -43,7 +43,7 @@ class AdminUserManagementRoutes(BaseAdminController):
override=GroupInDB, override=GroupInDB,
) )
response.set_pagination_guides(router.url_path_for("get_all"), q.dict()) response.set_pagination_guides(router.url_path_for("get_all"), q.model_dump())
return response return response
@router.post("", response_model=GroupInDB, status_code=status.HTTP_201_CREATED) @router.post("", response_model=GroupInDB, status_code=status.HTTP_201_CREATED)

View File

@ -37,7 +37,7 @@ class AdminUserManagementRoutes(BaseAdminController):
override=UserOut, override=UserOut,
) )
response.set_pagination_guides(router.url_path_for("get_all"), q.dict()) response.set_pagination_guides(router.url_path_for("get_all"), q.model_dump())
return response return response
@router.post("", response_model=UserOut, status_code=201) @router.post("", response_model=UserOut, status_code=201)

View File

@ -18,7 +18,7 @@ class AdminServerTasksController(BaseAdminController):
override=ServerTask, override=ServerTask,
) )
response.set_pagination_guides(router.url_path_for("get_all"), q.dict()) response.set_pagination_guides(router.url_path_for("get_all"), q.model_dump())
return response return response
@router.post("/server-tasks", response_model=ServerTask, status_code=201) @router.post("/server-tasks", response_model=ServerTask, status_code=201)

View File

@ -53,4 +53,4 @@ def get_app_theme(resp: Response):
settings = get_app_settings() settings = get_app_settings()
resp.headers["Cache-Control"] = "public, max-age=604800" resp.headers["Cache-Control"] = "public, max-age=604800"
return AppTheme(**settings.theme.dict()) return AppTheme(**settings.theme.model_dump())

View File

@ -48,7 +48,7 @@ class MealieAuthToken(BaseModel):
@classmethod @classmethod
def respond(cls, token: str, token_type: str = "bearer") -> dict: def respond(cls, token: str, token_type: str = "bearer") -> dict:
return cls(access_token=token, token_type=token_type).dict() return cls(access_token=token, token_type=token_type).model_dump()
@public_router.post("/token") @public_router.post("/token")

View File

@ -47,7 +47,7 @@ class RecipeCommentRoutes(BaseUserController):
override=RecipeCommentOut, override=RecipeCommentOut,
) )
response.set_pagination_guides(router.url_path_for("get_all"), q.dict()) response.set_pagination_guides(router.url_path_for("get_all"), q.model_dump())
return response return response
@router.post("", response_model=RecipeCommentOut, status_code=201) @router.post("", response_model=RecipeCommentOut, status_code=201)

View File

@ -1,3 +1,5 @@
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException from fastapi import APIRouter, Depends, HTTPException
from pydantic import UUID4 from pydantic import UUID4
@ -36,12 +38,19 @@ class PublicCookbooksController(BasePublicExploreController):
search=search, search=search,
) )
response.set_pagination_guides(router.url_path_for("get_all", group_slug=self.group.slug), q.dict()) response.set_pagination_guides(router.url_path_for("get_all", group_slug=self.group.slug), q.model_dump())
return response return response
@router.get("/{item_id}", response_model=RecipeCookBook) @router.get("/{item_id}", response_model=RecipeCookBook)
def get_one(self, item_id: UUID4 | str) -> RecipeCookBook: def get_one(self, item_id: UUID4 | str) -> RecipeCookBook:
match_attr = "slug" if isinstance(item_id, str) else "id" if isinstance(item_id, UUID):
match_attr = "id"
else:
try:
UUID(item_id)
match_attr = "id"
except ValueError:
match_attr = "slug"
cookbook = self.cookbooks.get_one(item_id, match_attr) cookbook = self.cookbooks.get_one(item_id, match_attr)
if not cookbook or not cookbook.public: if not cookbook or not cookbook.public:
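The slug-versus-id handling changes here and in the other controllers below: previously any string was matched on slug, now a string that parses as a UUID is matched on id. A minimal sketch of the same decision, with an illustrative helper name:

from uuid import UUID

def match_attr_for(item_id: UUID | str) -> str:
    # UUID instances and UUID-shaped strings are looked up by id; anything else by slug
    if isinstance(item_id, UUID):
        return "id"
    try:
        UUID(item_id)
        return "id"
    except ValueError:
        return "slug"

assert match_attr_for("my-favorite-cookbook") == "slug"
assert match_attr_for("8f9e1b4a-8b1a-4f6e-9c1d-2a3b4c5d6e7f") == "id"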

View File

@ -26,7 +26,7 @@ class PublicFoodsController(BasePublicExploreController):
search=search, search=search,
) )
response.set_pagination_guides(router.url_path_for("get_all", group_slug=self.group.slug), q.dict()) response.set_pagination_guides(router.url_path_for("get_all", group_slug=self.group.slug), q.model_dump())
return response return response
@router.get("/{item_id}", response_model=IngredientFood) @router.get("/{item_id}", response_model=IngredientFood)

View File

@ -31,7 +31,9 @@ class PublicCategoriesController(BasePublicExploreController):
search=search, search=search,
) )
response.set_pagination_guides(categories_router.url_path_for("get_all", group_slug=self.group.slug), q.dict()) response.set_pagination_guides(
categories_router.url_path_for("get_all", group_slug=self.group.slug), q.model_dump()
)
return response return response
@categories_router.get("/{item_id}", response_model=CategoryOut) @categories_router.get("/{item_id}", response_model=CategoryOut)
@ -59,7 +61,7 @@ class PublicTagsController(BasePublicExploreController):
search=search, search=search,
) )
response.set_pagination_guides(tags_router.url_path_for("get_all", group_slug=self.group.slug), q.dict()) response.set_pagination_guides(tags_router.url_path_for("get_all", group_slug=self.group.slug), q.model_dump())
return response return response
@tags_router.get("/{item_id}", response_model=TagOut) @tags_router.get("/{item_id}", response_model=TagOut)
@ -87,7 +89,7 @@ class PublicToolsController(BasePublicExploreController):
search=search, search=search,
) )
response.set_pagination_guides(tools_router.url_path_for("get_all", group_slug=self.group.slug), q.dict()) response.set_pagination_guides(tools_router.url_path_for("get_all", group_slug=self.group.slug), q.model_dump())
return response return response
@tools_router.get("/{item_id}", response_model=RecipeToolOut) @tools_router.get("/{item_id}", response_model=RecipeToolOut)

View File

@ -1,3 +1,5 @@
from uuid import UUID
import orjson import orjson
from fastapi import APIRouter, Depends, HTTPException, Query, Request from fastapi import APIRouter, Depends, HTTPException, Query, Request
from pydantic import UUID4 from pydantic import UUID4
@ -37,7 +39,14 @@ class PublicRecipesController(BasePublicExploreController):
) -> PaginationBase[RecipeSummary]: ) -> PaginationBase[RecipeSummary]:
cookbook_data: ReadCookBook | None = None cookbook_data: ReadCookBook | None = None
if search_query.cookbook: if search_query.cookbook:
cb_match_attr = "slug" if isinstance(search_query.cookbook, str) else "id" if isinstance(search_query.cookbook, UUID):
cb_match_attr = "id"
else:
try:
UUID(search_query.cookbook)
cb_match_attr = "id"
except ValueError:
cb_match_attr = "slug"
cookbook_data = self.cookbooks.get_one(search_query.cookbook, cb_match_attr) cookbook_data = self.cookbooks.get_one(search_query.cookbook, cb_match_attr)
if cookbook_data is None or not cookbook_data.public: if cookbook_data is None or not cookbook_data.public:
@ -64,13 +73,13 @@ class PublicRecipesController(BasePublicExploreController):
) )
# merge default pagination with the request's query params # merge default pagination with the request's query params
query_params = q.dict() | {**request.query_params} query_params = q.model_dump() | {**request.query_params}
pagination_response.set_pagination_guides( pagination_response.set_pagination_guides(
router.url_path_for("get_all", group_slug=self.group.slug), router.url_path_for("get_all", group_slug=self.group.slug),
{k: v for k, v in query_params.items() if v is not None}, {k: v for k, v in query_params.items() if v is not None},
) )
json_compatible_response = orjson.dumps(pagination_response.dict(by_alias=True)) json_compatible_response = orjson.dumps(pagination_response.model_dump(by_alias=True))
# Response is returned directly, to avoid validation and improve performance # Response is returned directly, to avoid validation and improve performance
return JSONBytes(content=json_compatible_response) return JSONBytes(content=json_compatible_response)

View File

@ -1,4 +1,5 @@
from functools import cached_property from functools import cached_property
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException from fastapi import APIRouter, Depends, HTTPException
from pydantic import UUID4 from pydantic import UUID4
@ -48,7 +49,7 @@ class GroupCookbookController(BaseCrudController):
override=ReadCookBook, override=ReadCookBook,
) )
response.set_pagination_guides(router.url_path_for("get_all"), q.dict()) response.set_pagination_guides(router.url_path_for("get_all"), q.model_dump())
return response return response
@router.post("", response_model=ReadCookBook, status_code=201) @router.post("", response_model=ReadCookBook, status_code=201)
@ -85,7 +86,15 @@ class GroupCookbookController(BaseCrudController):
@router.get("/{item_id}", response_model=RecipeCookBook) @router.get("/{item_id}", response_model=RecipeCookBook)
def get_one(self, item_id: UUID4 | str): def get_one(self, item_id: UUID4 | str):
match_attr = "slug" if isinstance(item_id, str) else "id" if isinstance(item_id, UUID):
match_attr = "id"
else:
try:
UUID(item_id)
match_attr = "id"
except ValueError:
match_attr = "slug"
cookbook = self.repo.get_one(item_id, match_attr) cookbook = self.repo.get_one(item_id, match_attr)
if cookbook is None: if cookbook is None:

View File

@ -58,7 +58,7 @@ class GroupEventsNotifierController(BaseUserController):
override=GroupEventNotifierOut, override=GroupEventNotifierOut,
) )
response.set_pagination_guides(router.url_path_for("get_all"), q.dict()) response.set_pagination_guides(router.url_path_for("get_all"), q.model_dump())
return response return response
@router.post("", response_model=GroupEventNotifierOut, status_code=201) @router.post("", response_model=GroupEventNotifierOut, status_code=201)

View File

@ -48,7 +48,7 @@ class MultiPurposeLabelsController(BaseUserController):
search=search, search=search,
) )
response.set_pagination_guides(router.url_path_for("get_all"), q.dict()) response.set_pagination_guides(router.url_path_for("get_all"), q.model_dump())
return response return response
@router.post("", response_model=MultiPurposeLabelOut) @router.post("", response_model=MultiPurposeLabelOut)

View File

@ -31,7 +31,7 @@ class GroupMealplanConfigController(BaseUserController):
override=PlanRulesOut, override=PlanRulesOut,
) )
response.set_pagination_guides(router.url_path_for("get_all"), q.dict()) response.set_pagination_guides(router.url_path_for("get_all"), q.model_dump())
return response return response
@router.post("", response_model=PlanRulesOut, status_code=201) @router.post("", response_model=PlanRulesOut, status_code=201)

View File

@ -44,6 +44,7 @@ class GroupMigrationController(BaseUserController):
"user_id": self.user.id, "user_id": self.user.id,
"group_id": self.group_id, "group_id": self.group_id,
"add_migration_tag": add_migration_tag, "add_migration_tag": add_migration_tag,
"translator": self.translator,
} }
table: dict[SupportedMigrations, type[BaseMigrator]] = { table: dict[SupportedMigrations, type[BaseMigrator]] = {

View File

@ -105,7 +105,7 @@ class ShoppingListItemController(BaseCrudController):
@item_router.get("", response_model=ShoppingListItemPagination) @item_router.get("", response_model=ShoppingListItemPagination)
def get_all(self, q: PaginationQuery = Depends()): def get_all(self, q: PaginationQuery = Depends()):
response = self.repo.page_all(pagination=q, override=ShoppingListItemOut) response = self.repo.page_all(pagination=q, override=ShoppingListItemOut)
response.set_pagination_guides(router.url_path_for("get_all"), q.dict()) response.set_pagination_guides(router.url_path_for("get_all"), q.model_dump())
return response return response
@item_router.post("/create-bulk", response_model=ShoppingListItemsCollectionOut, status_code=201) @item_router.post("/create-bulk", response_model=ShoppingListItemsCollectionOut, status_code=201)
@ -174,7 +174,7 @@ class ShoppingListController(BaseCrudController):
override=ShoppingListSummary, override=ShoppingListSummary,
) )
response.set_pagination_guides(router.url_path_for("get_all"), q.dict()) response.set_pagination_guides(router.url_path_for("get_all"), q.model_dump())
return response return response
@router.post("", response_model=ShoppingListOut, status_code=201) @router.post("", response_model=ShoppingListOut, status_code=201)

View File

@ -32,7 +32,7 @@ class ReadWebhookController(BaseUserController):
override=ReadWebhook, override=ReadWebhook,
) )
response.set_pagination_guides(router.url_path_for("get_all"), q.dict()) response.set_pagination_guides(router.url_path_for("get_all"), q.model_dump())
return response return response
@router.post("", response_model=ReadWebhook, status_code=201) @router.post("", response_model=ReadWebhook, status_code=201)

View File

@ -1,7 +1,7 @@
from functools import cached_property from functools import cached_property
from fastapi import APIRouter, Depends from fastapi import APIRouter, Depends
from pydantic import UUID4, BaseModel from pydantic import UUID4, BaseModel, ConfigDict
from mealie.routes._base import BaseCrudController, controller from mealie.routes._base import BaseCrudController, controller
from mealie.routes._base.mixins import HttpRepo from mealie.routes._base.mixins import HttpRepo
@ -20,9 +20,7 @@ class CategorySummary(BaseModel):
id: UUID4 id: UUID4
slug: str slug: str
name: str name: str
model_config = ConfigDict(from_attributes=True)
class Config:
orm_mode = True
@controller(router) @controller(router)
@ -46,7 +44,7 @@ class RecipeCategoryController(BaseCrudController):
search=search, search=search,
) )
response.set_pagination_guides(router.url_path_for("get_all"), q.dict()) response.set_pagination_guides(router.url_path_for("get_all"), q.model_dump())
return response return response
@router.post("", status_code=201) @router.post("", status_code=201)
@ -71,7 +69,7 @@ class RecipeCategoryController(BaseCrudController):
def get_one(self, item_id: UUID4): def get_one(self, item_id: UUID4):
"""Returns a list of recipes associated with the provided category.""" """Returns a list of recipes associated with the provided category."""
category_obj = self.mixins.get_one(item_id) category_obj = self.mixins.get_one(item_id)
category_obj = CategorySummary.from_orm(category_obj) category_obj = CategorySummary.model_validate(category_obj)
return category_obj return category_obj
@router.put("/{item_id}", response_model=CategorySummary) @router.put("/{item_id}", response_model=CategorySummary)
@ -119,7 +117,7 @@ class RecipeCategoryController(BaseCrudController):
def get_one_by_slug(self, category_slug: str): def get_one_by_slug(self, category_slug: str):
"""Returns a category object with the associated recieps relating to the category""" """Returns a category object with the associated recieps relating to the category"""
category: RecipeCategory = self.mixins.get_one(category_slug, "slug") category: RecipeCategory = self.mixins.get_one(category_slug, "slug")
return RecipeCategoryResponse.construct( return RecipeCategoryResponse.model_construct(
id=category.id, id=category.id,
slug=category.slug, slug=category.slug,
name=category.name, name=category.name,

View File

@ -35,7 +35,7 @@ class TagController(BaseCrudController):
search=search, search=search,
) )
response.set_pagination_guides(router.url_path_for("get_all"), q.dict()) response.set_pagination_guides(router.url_path_for("get_all"), q.model_dump())
return response return response
@router.get("/empty") @router.get("/empty")

View File

@ -32,7 +32,7 @@ class RecipeToolController(BaseUserController):
search=search, search=search,
) )
response.set_pagination_guides(router.url_path_for("get_all"), q.dict()) response.set_pagination_guides(router.url_path_for("get_all"), q.model_dump())
return response return response
@router.post("", response_model=RecipeTool, status_code=201) @router.post("", response_model=RecipeTool, status_code=201)

View File

@ -1,5 +1,6 @@
from functools import cached_property from functools import cached_property
from shutil import copyfileobj from shutil import copyfileobj
from uuid import UUID
from zipfile import ZipFile from zipfile import ZipFile
import orjson import orjson
@ -125,7 +126,7 @@ class RecipeExportController(BaseRecipeController):
recipe: Recipe = self.mixins.get_one(slug) recipe: Recipe = self.mixins.get_one(slug)
image_asset = recipe.image_dir.joinpath(RecipeImageTypes.original.value) image_asset = recipe.image_dir.joinpath(RecipeImageTypes.original.value)
with ZipFile(temp_path, "w") as myzip: with ZipFile(temp_path, "w") as myzip:
myzip.writestr(f"{slug}.json", recipe.json()) myzip.writestr(f"{slug}.json", recipe.model_dump_json())
if image_asset.is_file(): if image_asset.is_file():
myzip.write(image_asset, arcname=image_asset.name) myzip.write(image_asset, arcname=image_asset.name)
@ -164,7 +165,7 @@ class RecipeController(BaseRecipeController):
async def parse_recipe_url(self, req: ScrapeRecipe): async def parse_recipe_url(self, req: ScrapeRecipe):
"""Takes in a URL and attempts to scrape data and load it into the database""" """Takes in a URL and attempts to scrape data and load it into the database"""
try: try:
recipe, extras = await create_from_url(req.url) recipe, extras = await create_from_url(req.url, self.translator)
except ForceTimeoutException as e: except ForceTimeoutException as e:
raise HTTPException( raise HTTPException(
status_code=408, detail=ErrorResponse.respond(message="Recipe Scraping Timed Out") status_code=408, detail=ErrorResponse.respond(message="Recipe Scraping Timed Out")
@ -193,7 +194,7 @@ class RecipeController(BaseRecipeController):
@router.post("/create-url/bulk", status_code=202) @router.post("/create-url/bulk", status_code=202)
def parse_recipe_url_bulk(self, bulk: CreateRecipeByUrlBulk, bg_tasks: BackgroundTasks): def parse_recipe_url_bulk(self, bulk: CreateRecipeByUrlBulk, bg_tasks: BackgroundTasks):
"""Takes in a URL and attempts to scrape data and load it into the database""" """Takes in a URL and attempts to scrape data and load it into the database"""
bulk_scraper = RecipeBulkScraperService(self.service, self.repos, self.group) bulk_scraper = RecipeBulkScraperService(self.service, self.repos, self.group, self.translator)
report_id = bulk_scraper.get_report_id() report_id = bulk_scraper.get_report_id()
bg_tasks.add_task(bulk_scraper.scrape, bulk) bg_tasks.add_task(bulk_scraper.scrape, bulk)
@ -208,7 +209,7 @@ class RecipeController(BaseRecipeController):
async def test_parse_recipe_url(self, url: ScrapeRecipeTest): async def test_parse_recipe_url(self, url: ScrapeRecipeTest):
# Debugger should produce the same result as the scraper sees before cleaning # Debugger should produce the same result as the scraper sees before cleaning
try: try:
if scraped_data := await RecipeScraperPackage(url.url).scrape_url(): if scraped_data := await RecipeScraperPackage(url.url, self.translator).scrape_url():
return scraped_data.schema.data return scraped_data.schema.data
except ForceTimeoutException as e: except ForceTimeoutException as e:
raise HTTPException( raise HTTPException(
@ -244,7 +245,14 @@ class RecipeController(BaseRecipeController):
): ):
cookbook_data: ReadCookBook | None = None cookbook_data: ReadCookBook | None = None
if search_query.cookbook: if search_query.cookbook:
cb_match_attr = "slug" if isinstance(search_query.cookbook, str) else "id" if isinstance(search_query.cookbook, UUID):
cb_match_attr = "id"
else:
try:
UUID(search_query.cookbook)
cb_match_attr = "id"
except ValueError:
cb_match_attr = "slug"
cookbook_data = self.cookbooks_repo.get_one(search_query.cookbook, cb_match_attr) cookbook_data = self.cookbooks_repo.get_one(search_query.cookbook, cb_match_attr)
if cookbook_data is None: if cookbook_data is None:
@ -265,13 +273,13 @@ class RecipeController(BaseRecipeController):
) )
# merge default pagination with the request's query params # merge default pagination with the request's query params
query_params = q.dict() | {**request.query_params} query_params = q.model_dump() | {**request.query_params}
pagination_response.set_pagination_guides( pagination_response.set_pagination_guides(
router.url_path_for("get_all"), router.url_path_for("get_all"),
{k: v for k, v in query_params.items() if v is not None}, {k: v for k, v in query_params.items() if v is not None},
) )
json_compatible_response = orjson.dumps(pagination_response.dict(by_alias=True)) json_compatible_response = orjson.dumps(pagination_response.model_dump(by_alias=True))
# Response is returned directly, to avoid validation and improve performance # Response is returned directly, to avoid validation and improve performance
return JSONBytes(content=json_compatible_response) return JSONBytes(content=json_compatible_response)

View File

@ -49,7 +49,7 @@ class RecipeTimelineEventsController(BaseCrudController):
override=RecipeTimelineEventOut, override=RecipeTimelineEventOut,
) )
response.set_pagination_guides(events_router.url_path_for("get_all"), q.dict()) response.set_pagination_guides(events_router.url_path_for("get_all"), q.model_dump())
return response return response
@events_router.post("", response_model=RecipeTimelineEventOut, status_code=201) @events_router.post("", response_model=RecipeTimelineEventOut, status_code=201)

View File

@ -30,7 +30,7 @@ class RecipeSharedController(BaseUserController):
@router.post("", response_model=RecipeShareToken, status_code=201) @router.post("", response_model=RecipeShareToken, status_code=201)
def create_one(self, data: RecipeShareTokenCreate) -> RecipeShareToken: def create_one(self, data: RecipeShareTokenCreate) -> RecipeShareToken:
save_data = RecipeShareTokenSave(**data.dict(), group_id=self.group_id) save_data = RecipeShareTokenSave(**data.model_dump(), group_id=self.group_id)
return self.mixins.create_one(save_data) return self.mixins.create_one(save_data)
@router.get("/{item_id}", response_model=RecipeShareToken) @router.get("/{item_id}", response_model=RecipeShareToken)

View File

@ -52,7 +52,7 @@ class IngredientFoodsController(BaseUserController):
search=search, search=search,
) )
response.set_pagination_guides(router.url_path_for("get_all"), q.dict()) response.set_pagination_guides(router.url_path_for("get_all"), q.model_dump())
return response return response
@router.post("", response_model=IngredientFood, status_code=201) @router.post("", response_model=IngredientFood, status_code=201)

View File

@ -52,7 +52,7 @@ class IngredientUnitsController(BaseUserController):
search=search, search=search,
) )
response.set_pagination_guides(router.url_path_for("get_all"), q.dict()) response.set_pagination_guides(router.url_path_for("get_all"), q.model_dump())
return response return response
@router.post("", response_model=IngredientUnit, status_code=201) @router.post("", response_model=IngredientUnit, status_code=201)

View File

@ -29,7 +29,7 @@ class AdminUserController(BaseAdminController):
override=UserOut, override=UserOut,
) )
response.set_pagination_guides(admin_router.url_path_for("get_all"), q.dict()) response.set_pagination_guides(admin_router.url_path_for("get_all"), q.model_dump())
return response return response
@admin_router.post("", response_model=UserOut, status_code=201) @admin_router.post("", response_model=UserOut, status_code=201)
@ -103,7 +103,7 @@ class UserController(BaseUserController):
) )
try: try:
self.repos.users.update(item_id, new_data.dict()) self.repos.users.update(item_id, new_data.model_dump())
except Exception as e: except Exception as e:
raise HTTPException( raise HTTPException(
status.HTTP_400_BAD_REQUEST, status.HTTP_400_BAD_REQUEST,

View File

@ -0,0 +1,252 @@
"""
From Pydantic V1: https://github.com/pydantic/pydantic/blob/abcf81ec104d2da70894ac0402ae11a7186c5e47/pydantic/datetime_parse.py
"""
import re
from datetime import date, datetime, time, timedelta, timezone
date_expr = r"(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})"
time_expr = (
r"(?P<hour>\d{1,2}):(?P<minute>\d{1,2})"
r"(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?"
r"(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$"
)
date_re = re.compile(f"{date_expr}$")
time_re = re.compile(time_expr)
datetime_re = re.compile(f"{date_expr}[T ]{time_expr}")
standard_duration_re = re.compile(
r"^"
r"(?:(?P<days>-?\d+) (days?, )?)?"
r"((?:(?P<hours>-?\d+):)(?=\d+:\d+))?"
r"(?:(?P<minutes>-?\d+):)?"
r"(?P<seconds>-?\d+)"
r"(?:\.(?P<microseconds>\d{1,6})\d{0,6})?"
r"$"
)
# Support the sections of ISO 8601 date representation that are accepted by timedelta
iso8601_duration_re = re.compile(
r"^(?P<sign>[-+]?)"
r"P"
r"(?:(?P<days>\d+(.\d+)?)D)?"
r"(?:T"
r"(?:(?P<hours>\d+(.\d+)?)H)?"
r"(?:(?P<minutes>\d+(.\d+)?)M)?"
r"(?:(?P<seconds>\d+(.\d+)?)S)?"
r")?"
r"$"
)
EPOCH = datetime(1970, 1, 1)
# if greater than this, the number is in ms, if less than or equal it's in seconds
# (in seconds this is 11th October 2603, in ms it's 20th August 1970)
MS_WATERSHED = int(2e10)
# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9
MAX_NUMBER = int(3e20)
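# For example, 1_700_000_000 is below MS_WATERSHED and is interpreted as seconds,
# while 1_700_000_000_000 is above it and is divided by 1000 (ms -> s) in
# from_unix_seconds before being added to EPOCH.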
class DateError(ValueError):
def __init__(self, *args: object) -> None:
super().__init__("invalid date format")
class TimeError(ValueError):
def __init__(self, *args: object) -> None:
super().__init__("invalid time format")
class DateTimeError(ValueError):
def __init__(self, *args: object) -> None:
super().__init__("invalid datetime format")
class DurationError(ValueError):
def __init__(self, *args: object) -> None:
super().__init__("invalid duration format")
def get_numeric(value: str | bytes | int | float, native_expected_type: str) -> None | int | float:
if isinstance(value, int | float):
return value
try:
return float(value)
except ValueError:
return None
except TypeError as e:
raise TypeError(f"invalid type; expected {native_expected_type}, string, bytes, int or float") from e
def from_unix_seconds(seconds: int | float) -> datetime:
if seconds > MAX_NUMBER:
return datetime.max
elif seconds < -MAX_NUMBER:
return datetime.min
while abs(seconds) > MS_WATERSHED:
seconds /= 1000
dt = EPOCH + timedelta(seconds=seconds)
return dt.replace(tzinfo=timezone.utc)
def _parse_timezone(value: str | None, error: type[Exception]) -> None | int | timezone:
if value == "Z":
return timezone.utc
elif value is not None:
offset_mins = int(value[-2:]) if len(value) > 3 else 0
offset = 60 * int(value[1:3]) + offset_mins
if value[0] == "-":
offset = -offset
try:
return timezone(timedelta(minutes=offset))
except ValueError as e:
raise error() from e
else:
return None
def parse_date(value: date | str | bytes | int | float) -> date:
"""
Parse a date/int/float/string and return a datetime.date.
Raise ValueError if the input is well formatted but not a valid date.
Raise ValueError if the input isn't well formatted.
"""
if isinstance(value, date):
if isinstance(value, datetime):
return value.date()
else:
return value
number = get_numeric(value, "date")
if number is not None:
return from_unix_seconds(number).date()
if isinstance(value, bytes):
value = value.decode()
match = date_re.match(value) # type: ignore
if match is None:
raise DateError()
kw = {k: int(v) for k, v in match.groupdict().items()}
try:
return date(**kw)
except ValueError as e:
raise DateError() from e
def parse_time(value: time | str | bytes | int | float) -> time:
"""
Parse a time/string and return a datetime.time.
Raise ValueError if the input is well formatted but not a valid time.
Raise ValueError if the input isn't well formatted, in particular if it contains an offset.
"""
if isinstance(value, time):
return value
number = get_numeric(value, "time")
if number is not None:
if number >= 86400:
# doesn't make sense since the time would loop back around to 0
raise TimeError()
return (datetime.min + timedelta(seconds=number)).time()
if isinstance(value, bytes):
value = value.decode()
match = time_re.match(value) # type: ignore
if match is None:
raise TimeError()
kw = match.groupdict()
if kw["microsecond"]:
kw["microsecond"] = kw["microsecond"].ljust(6, "0")
tzinfo = _parse_timezone(kw.pop("tzinfo"), TimeError)
kw_: dict[str, None | int | timezone] = {k: int(v) for k, v in kw.items() if v is not None}
kw_["tzinfo"] = tzinfo
try:
return time(**kw_) # type: ignore
except ValueError as e:
raise TimeError() from e
def parse_datetime(value: datetime | str | bytes | int | float) -> datetime:
"""
Parse a datetime/int/float/string and return a datetime.datetime.
This function supports time zone offsets. When the input contains one,
the output uses a timezone with a fixed offset from UTC.
Raise ValueError if the input is well formatted but not a valid datetime.
Raise ValueError if the input isn't well formatted.
"""
if isinstance(value, datetime):
return value
number = get_numeric(value, "datetime")
if number is not None:
return from_unix_seconds(number)
if isinstance(value, bytes):
value = value.decode()
match = datetime_re.match(value) # type: ignore
if match is None:
raise DateTimeError()
kw = match.groupdict()
if kw["microsecond"]:
kw["microsecond"] = kw["microsecond"].ljust(6, "0")
tzinfo = _parse_timezone(kw.pop("tzinfo"), DateTimeError)
kw_: dict[str, None | int | timezone] = {k: int(v) for k, v in kw.items() if v is not None}
kw_["tzinfo"] = tzinfo
try:
return datetime(**kw_) # type: ignore
except ValueError as e:
raise DateTimeError() from e
def parse_duration(value: timedelta | str | bytes | int | float) -> timedelta:
"""
Parse a duration int/float/string and return a datetime.timedelta.
The preferred format for durations in Django is '%d %H:%M:%S.%f'.
Also supports ISO 8601 representation.
"""
if isinstance(value, timedelta):
return value
if isinstance(value, int | float):
# below code requires a string
value = f"{value:f}"
elif isinstance(value, bytes):
value = value.decode()
try:
match = standard_duration_re.match(value) or iso8601_duration_re.match(value)
except TypeError as e:
raise TypeError("invalid type; expected timedelta, string, bytes, int or float") from e
if not match:
raise DurationError()
kw = match.groupdict()
sign = -1 if kw.pop("sign", "+") == "-" else 1
if kw.get("microseconds"):
kw["microseconds"] = kw["microseconds"].ljust(6, "0")
if kw.get("seconds") and kw.get("microseconds") and kw["seconds"].startswith("-"):
kw["microseconds"] = "-" + kw["microseconds"]
kw_ = {k: float(v) for k, v in kw.items() if v is not None}
return sign * timedelta(**kw_)
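Taken together, these helpers reproduce the lenient Pydantic V1 parsing behaviour for dates, times, datetimes and durations. A rough usage sketch, assuming the file is importable as datetime_parse (the actual import path inside the project may differ):

    from datetime_parse import parse_date, parse_datetime, parse_duration, parse_time

    parse_date("2024-02-13")                # date(2024, 2, 13)
    parse_time("18:44:01")                  # time(18, 44, 1)
    parse_datetime("2024-02-13T18:44:01Z")  # datetime(2024, 2, 13, 18, 44, 1, tzinfo=timezone.utc)
    parse_datetime(0)                       # datetime(1970, 1, 1, tzinfo=timezone.utc)
    parse_duration("P1DT2H")                # timedelta(days=1, hours=2)
    parse_duration("1 02:00:00")            # same value via the Django-style format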

Some files were not shown because too many files have changed in this diff.