Mirror of https://github.com/mealie-recipes/mealie.git (synced 2025-07-09 03:04:54 -04:00)
feat(backend): ➕ Add Rich for Development Logging
parent 9386cc320b
commit cb85b14e01
@@ -1,4 +1,3 @@
from mealie.services.recipe.all_recipes import subscripte_to_recipe_events
import uvicorn
from fastapi import FastAPI
from fastapi.middleware.gzip import GZipMiddleware
@@ -15,6 +14,7 @@ from mealie.routes.shopping_list import shopping_list_router
from mealie.routes.site_settings import settings_router
from mealie.routes.users import user_router
from mealie.services.events import create_general_event
from mealie.services.recipe.all_recipes import subscripte_to_recipe_events

logger = get_logger()

@@ -86,7 +86,6 @@ def system_startup():


def main():

    uvicorn.run(
        "app:app",
        host="0.0.0.0",
@@ -94,7 +93,8 @@ def main():
        reload=True,
        reload_dirs=["mealie"],
        debug=True,
        log_level="info",
        log_level="debug",
        use_colors=True,
        log_config=None,
        workers=1,
        forwarded_allow_ips="*",
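Context for the server change above: log_level moves from "info" to "debug" and log_config=None is passed so uvicorn does not install its own logging configuration, leaving the handlers set up by the application (Rich in development) to receive uvicorn's records. A minimal, hedged sketch of that launch pattern; the port value is a placeholder, not taken from this diff:

# Sketch only: run uvicorn without its default logging config so the
# application's root handlers (e.g. RichHandler in development) apply.
import logging

import uvicorn

logging.basicConfig(level=logging.DEBUG)  # stand-in for Mealie's root logger setup

if __name__ == "__main__":
    uvicorn.run(
        "app:app",          # ASGI app import string
        host="0.0.0.0",
        port=8000,          # placeholder port
        log_level="debug",  # verbosity of uvicorn's own loggers
        log_config=None,    # skip uvicorn's default logging dictConfig
    )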
@@ -1,36 +1,66 @@
import logging
import sys
from dataclasses import dataclass
from functools import lru_cache

from mealie.core.config import DATA_DIR

from .config import settings

LOGGER_FILE = DATA_DIR.joinpath("mealie.log")
DATE_FORMAT = "%d-%b-%y %H:%M:%S"
LOGGER_FORMAT = "%(levelname)s: %(asctime)s \t%(message)s"
LOGGER_HANDLER = None

logging.basicConfig(level=logging.INFO, format=LOGGER_FORMAT, datefmt="%d-%b-%y %H:%M:%S")

@dataclass
class LoggerConfig:
    handlers: list
    format: str
    date_format: str
    logger_file: str
    level: str = logging.INFO


@lru_cache
def get_logger_config():
    if not settings.PRODUCTION:
        from rich.logging import RichHandler

        return LoggerConfig(
            handlers=[RichHandler(rich_tracebacks=True)],
            format=None,
            date_format=None,
            logger_file=None,
        )

    return LoggerConfig(
        handlers=[
            logging.FileHandler(LOGGER_FILE),
            logging.Formatter(LOGGER_FORMAT, datefmt=DATE_FORMAT),
        ],
        format="%(levelname)s: %(asctime)s \t%(message)s",
        date_format="%d-%b-%y %H:%M:%S",
        logger_file=LOGGER_FILE,
    )


logger_config = get_logger_config()

logging.basicConfig(
    level=logger_config.level,
    format=logger_config.format,
    datefmt=logger_config.date_format,
    handlers=logger_config.handlers,
)


def logger_init() -> logging.Logger:
    """ Returns the Root Loggin Object for Mealie """
    logger = logging.getLogger("mealie")
    logger.propagate = False

    # File Handler
    output_file_handler = logging.FileHandler(LOGGER_FILE)
    handler_format = logging.Formatter(LOGGER_FORMAT, datefmt=DATE_FORMAT)
    output_file_handler.setFormatter(handler_format)

    # Stdout
    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setFormatter(handler_format)

    logger.addHandler(output_file_handler)
    logger.addHandler(stdout_handler)

    return logger
    return logging.getLogger("mealie")


root_logger = logger_init()
root_logger.info("Testing Root Logger")


def get_logger(module=None) -> logging.Logger:
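The hunk above is the core of the change: in development the root logger uses rich.logging.RichHandler with rich_tracebacks=True, while production keeps a file handler, and modules fetch the shared logger through get_logger(). A standalone sketch of that dev/prod handler selection, using assumed placeholder names (IS_DEV, LOG_FILE) rather than Mealie's settings object:

# Sketch only: choose logging handlers by environment, as the diff does
# with settings.PRODUCTION and LoggerConfig.
import logging
from pathlib import Path

IS_DEV = True                  # placeholder for "not settings.PRODUCTION"
LOG_FILE = Path("mealie.log")  # placeholder log path
FMT = "%(levelname)s: %(asctime)s \t%(message)s"
DATE_FMT = "%d-%b-%y %H:%M:%S"

if IS_DEV:
    from rich.logging import RichHandler

    handlers = [RichHandler(rich_tracebacks=True)]  # colorized console output with rich tracebacks
else:
    file_handler = logging.FileHandler(LOG_FILE)
    file_handler.setFormatter(logging.Formatter(FMT, datefmt=DATE_FMT))  # formatter is attached to the handler
    handlers = [file_handler]

logging.basicConfig(level=logging.INFO, handlers=handlers)

logger = logging.getLogger("mealie")  # what get_logger() hands back to callers
logger.info("logger configured")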
@@ -92,11 +92,9 @@ class _Recipes(BaseDocument):
        )

    def subscribe(self, func: Callable) -> None:
        print("Subscripe", func)
        self.observers.append(func)

    def update_observers(self) -> None:
        print("Updating Observers", self.observers)
        for observer in self.observers:
            observer()
@@ -21,19 +21,16 @@ class BaseDocument:
    def get_all(
        self, session: Session, limit: int = None, order_by: str = None, start=0, end=9999, override_schema=None
    ) -> list[dict]:
        logger.info("Starting Query")
        eff_schema = override_schema or self.schema

        if order_by:
            order_attr = getattr(self.sql_model, str(order_by))
            logger.info("Ending Query")

            return [
                eff_schema.from_orm(x)
                for x in session.query(self.sql_model).order_by(order_attr.desc()).offset(start).limit(limit).all()
            ]

        logger.info("Ending Query")
        return [eff_schema.from_orm(x) for x in session.query(self.sql_model).offset(start).limit(limit).all()]

    def get_all_limit_columns(self, session: Session, fields: list[str], limit: int = None) -> list[SqlAlchemyBase]:
@@ -70,9 +70,9 @@ def main():
    session = create_session()
    init_user = db.users.get(session, "1", "id")
    if init_user:
        print("Database Exists")
        logger.info("Database Exists")
    else:
        print("Database Doesn't Exists, Initializing...")
        logger.info("Database Doesn't Exists, Initializing...")
        init_db()
        create_general_event("Initialize Database", "Initialize database with default values", session)
@@ -29,7 +29,6 @@ def get_all_meals(
def get_this_week(session: Session = Depends(generate_session), current_user: UserInDB = Depends(get_current_user)):
    """ Returns the meal plan data for this week """
    plans = db.groups.get_meals(session, current_user.group)
    print(plans)
    if plans:
        return plans[0]
@@ -47,7 +47,6 @@ async def delete_comment(
):
    """ Delete comment from the Database """
    comment: CommentOut = db.comments.get(session, id)
    print(current_user.id, comment.user.id, current_user.admin)
    if current_user.id == comment.user.id or current_user.admin:
        db.comments.delete(session, id)
        return
@@ -109,7 +109,6 @@ def get_recipe(recipe_slug: str, session: Session = Depends(generate_session), i

    if not recipe:
        raise HTTPException(status.HTTP_404_NOT_FOUND)
    print(recipe.settings.public, is_user)
    if recipe.settings.public or is_user:

        return recipe
@@ -6,8 +6,7 @@ from mealie.db.database import db
from mealie.db.db_setup import generate_session
from mealie.routes.deps import get_admin_user
from mealie.routes.routers import AdminAPIRouter
from mealie.schema.user import (SignUpIn, SignUpOut, SignUpToken, UserIn,
                                UserInDB)
from mealie.schema.user import SignUpIn, SignUpOut, SignUpToken, UserIn, UserInDB
from mealie.services.events import create_user_event
from sqlalchemy.orm.session import Session
@@ -12,7 +12,6 @@ router = APIRouter(prefix="/api/utils", tags=["Utils"], include_in_schema=True)
async def download_file(file_path: Optional[Path] = Depends(validate_file_token)):
    """Uses a file token obtained by an active user to retrieve a file from the operating
    system."""
    print("File Name:", file_path)
    if not file_path.is_file():
        raise HTTPException(status.HTTP_400_BAD_REQUEST)
@@ -97,8 +97,7 @@ class ExportDatabase:
        zip_path = app_dirs.BACKUP_DIR.joinpath(f"{self.main_dir.name}")
        shutil.make_archive(zip_path, "zip", self.main_dir)

        shutil.rmtree(app_dirs.TEMP_DIR)

        shutil.rmtree(app_dirs.TEMP_DIR, ignore_errors=True)
        return str(zip_path.absolute()) + ".zip"
@@ -6,18 +6,10 @@ from typing import Callable

from mealie.core.config import app_dirs
from mealie.db.database import db
from mealie.schema.admin import (
    CustomPageImport,
    CustomPageOut,
    GroupImport,
    NotificationImport,
    RecipeImport,
    SettingsImport,
    SiteSettings,
    SiteTheme,
    ThemeImport,
    UserImport,
)
from mealie.schema.admin import (CustomPageImport, CustomPageOut, GroupImport,
                                 NotificationImport, RecipeImport,
                                 SettingsImport, SiteSettings, SiteTheme,
                                 ThemeImport, UserImport)
from mealie.schema.events import EventNotificationIn
from mealie.schema.recipe import CommentOut, Recipe
from mealie.schema.user import UpdateGroup, UserInDB
@@ -369,6 +361,7 @@ def import_database(
    if import_themes:
        theme_report = import_session.import_themes()

    page_report = []
    if import_pages:
        page_report = import_session.import_pages()
@@ -27,8 +27,6 @@ def post_notifications(event: Event, notification_urls=list[str], hard_fail=Fals
    if not status and hard_fail:
        raise Exception("Apprise URL Add Failed")

    print(attachment)

    apobj.notify(
        body=event.text,
        title=event.title,
@@ -35,7 +35,6 @@ def write_image(recipe_slug: str, file_data: bytes, extension: str) -> Path:
    with open(image_path, "ab") as f:
        shutil.copyfileobj(file_data, f)

    print(image_path)
    minify.minify_image(image_path, force=True)

    return image_path
@@ -45,7 +45,6 @@ class MigrationBase(BaseModel):

    @staticmethod
    def json_reader(json_file: Path) -> dict:
        print(json_file)
        with open(json_file, "r") as f:
            return json.loads(f.read())
@@ -4,8 +4,7 @@ from typing import Optional
from mealie.core.config import app_dirs
from mealie.schema.admin import MigrationImport
from mealie.services.migrations import helpers
from mealie.services.migrations._migration_base import (MigrationAlias,
                                                        MigrationBase)
from mealie.services.migrations._migration_base import MigrationAlias, MigrationBase
from sqlalchemy.orm.session import Session
@@ -3,10 +3,13 @@ from functools import lru_cache

from fastapi import Response
from fastapi.encoders import jsonable_encoder
from mealie.core.root_logger import get_logger
from mealie.db.database import db
from mealie.db.db_setup import SessionLocal
from mealie.schema.recipe import RecipeSummary

logger = get_logger()


@lru_cache(maxsize=1)
def get_all_recipes_user(limit, start):
@@ -29,10 +32,11 @@ def get_all_recipes_public(limit, start):


def clear_all_cache():
    print("Cache Cleared")
    get_all_recipes_user.cache_clear()
    get_all_recipes_public.cache_clear()
    logger.info("All Recipes Cache Cleared")


def subscripte_to_recipe_events():
    db.recipes.subscribe(clear_all_cache)
    logger.info("All Recipes Subscribed to Database Events")
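The hunk above pairs the cached recipe queries with the observer hooks on the recipes table: clear_all_cache() is subscribed to database events so any write invalidates the lru_cache results, and the old print calls become logger.info. A stripped-down sketch of that cache invalidation pattern; the Repository class and the query body are illustrative stand-ins, not Mealie's real classes:

# Sketch only: lru_cache + observer callback, mirroring subscripte_to_recipe_events().
from functools import lru_cache
from typing import Callable


class Repository:
    """Illustrative stand-in for the recipes data-access layer."""

    def __init__(self):
        self.observers: list[Callable] = []

    def subscribe(self, func: Callable) -> None:
        self.observers.append(func)

    def update_observers(self) -> None:
        # called after create/update/delete so caches can be invalidated
        for observer in self.observers:
            observer()


@lru_cache(maxsize=1)
def get_all_recipes(limit=100, start=0):
    return ["expensive", "query", "result"]  # placeholder for the real DB query


def clear_all_cache():
    get_all_recipes.cache_clear()


repo = Repository()
repo.subscribe(clear_all_cache)  # same idea as db.recipes.subscribe(clear_all_cache)
get_all_recipes()                # result is cached
repo.update_observers()          # a write occurred, cache is cleared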
@@ -4,8 +4,11 @@ import re
from datetime import datetime, timedelta
from typing import List

from mealie.core.root_logger import get_logger
from slugify import slugify

logger = get_logger()


def clean(recipe_data: dict, url=None) -> dict:
    """Main entrypoint to clean a recipe extracted from the web
@@ -39,13 +42,9 @@ def clean_string(text: str) -> str:
    if isinstance(text, list):
        text = text[0]

    print(type(text))

    if text == "" or text is None:
        return ""

    print(text)

    cleaned_text = html.unescape(text)
    cleaned_text = re.sub("<[^<]+?>", "", cleaned_text)
    cleaned_text = re.sub(" +", " ", cleaned_text)
@@ -122,7 +121,7 @@ def instructions(instructions) -> List[dict]:

        return [{"text": _instruction(step["text"])} for step in instructions if step["@type"] == "HowToStep"]
    except Exception as e:
        print(e)
        logger.error(e)
        # Not "@type", try "type"
        try:
            return [
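In the scraper hunk above, bare print(e) calls in the exception handlers become logger.error(e); with the development RichHandler configured with rich_tracebacks=True, logging inside an except block can also carry the full traceback. A small sketch of that, assuming a placeholder parse function rather than Mealie's real instruction parser:

# Sketch only: exception logging rendered by RichHandler.
import logging

from rich.logging import RichHandler

logging.basicConfig(level=logging.DEBUG, handlers=[RichHandler(rich_tracebacks=True)])
logger = logging.getLogger("mealie")


def parse_instructions(instructions):
    try:
        return [step["text"] for step in instructions]  # placeholder parsing step
    except Exception:
        # logger.exception logs at ERROR level and includes the active traceback,
        # which RichHandler renders as a formatted stack trace in the console
        logger.exception("failed to parse instructions")
        return []


parse_instructions([{"wrong_key": "oops"}])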