Improve backend performance with caching

This commit is contained in:
hay-kot 2021-08-07 11:28:40 -08:00
parent c13fb6743c
commit 51876508f3
5 changed files with 86 additions and 16 deletions

View File

@ -1,5 +1,7 @@
from mealie.services.recipe.all_recipes import subscripte_to_recipe_events
import uvicorn
from fastapi import FastAPI
from fastapi.middleware.gzip import GZipMiddleware
from mealie.core.config import APP_VERSION, settings
from mealie.core.root_logger import get_logger
@ -24,6 +26,8 @@ app = FastAPI(
redoc_url=settings.REDOC_URL,
)
app.add_middleware(GZipMiddleware, minimum_size=1000)
def start_scheduler():
    # Imported purely for its side effect: importing the module registers
    # the scheduled jobs with the scheduler. The name itself is unused,
    # hence the noqa.
    import mealie.services.scheduler.scheduled_jobs  # noqa: F401
@ -78,6 +82,7 @@ def system_startup():
)
)
create_general_event("Application Startup", f"Mealie API started on port {settings.API_PORT}")
subscripte_to_recipe_events()
def main():

View File

@ -1,5 +1,6 @@
from logging import getLogger
from random import randint
from typing import Callable
from mealie.db.db_base import BaseDocument
from mealie.db.models.event import Event, EventNotification
@ -20,11 +21,15 @@ from mealie.schema.admin import SiteTheme
from mealie.schema.events import Event as EventSchema
from mealie.schema.events import EventNotificationIn
from mealie.schema.meal_plan import MealPlanOut, ShoppingListOut
from mealie.schema.recipe import (CommentOut, Recipe, RecipeCategoryResponse,
RecipeIngredientFood, RecipeIngredientUnit,
RecipeTagResponse)
from mealie.schema.user import (GroupInDB, LongLiveTokenInDB, SignUpOut,
UserInDB)
from mealie.schema.recipe import (
CommentOut,
Recipe,
RecipeCategoryResponse,
RecipeIngredientFood,
RecipeIngredientUnit,
RecipeTagResponse,
)
from mealie.schema.user import GroupInDB, LongLiveTokenInDB, SignUpOut, UserInDB
from sqlalchemy.orm.session import Session
logger = getLogger()
@ -36,9 +41,9 @@ class _Recipes(BaseDocument):
self.sql_model: RecipeModel = RecipeModel
self.schema: Recipe = Recipe
def get_all_not_private(
self, session: Session, limit: int = None, order_by: str = None, start=0, override_schema=None
):
self.observers = []
def get_all_public(self, session: Session, limit: int = None, order_by: str = None, start=0, override_schema=None):
eff_schema = override_schema or self.schema
if order_by:
@ -86,6 +91,15 @@ class _Recipes(BaseDocument):
session, attribute_name=RecipeModel.tags, attr_match=None, count=count, override_schema=override_schema
)
def subscribe(self, func: Callable) -> None:
    """Register *func* as an observer of this recipe store.

    Args:
        func: Zero-argument callback; invoked by update_observers()
            whenever a write operation touches the recipe table.
    """
    # Leftover debug print (misspelled "Subscripe") removed; registering
    # the callback is the only intended effect.
    self.observers.append(func)
def update_observers(self) -> None:
    """Invoke every registered observer callback.

    Called after create/update/delete operations so caches keyed on this
    table can invalidate themselves.
    """
    # Leftover debug print removed. Guard against observers being None:
    # BaseDocument.__init__ initializes `self.observers: list = None`.
    for observer in self.observers or []:
        observer()
class _IngredientFoods(BaseDocument):
def __init__(self) -> None:

View File

@ -16,20 +16,24 @@ class BaseDocument:
self.store: str
self.sql_model: SqlAlchemyBase
self.schema: BaseModel
self.observers: list = None
def get_all(
    self, session: Session, limit: int = None, order_by: str = None, start=0, end=9999, override_schema=None
) -> list[dict]:
    """Fetch all rows for this model and return them as schema objects.

    Args:
        session: Active SQLAlchemy session.
        limit: Maximum number of rows to return (None = no limit).
        order_by: Optional column name; results are sorted descending on it.
        start: Offset of the first row.
        end: Unused; kept for interface compatibility with existing callers.
        override_schema: Alternative schema class used to serialize rows
            instead of self.schema.
    """
    logger.info("Starting Query")
    eff_schema = override_schema or self.schema

    query = session.query(self.sql_model)
    if order_by:
        # Sort newest-first on the requested column; AttributeError here
        # means the caller passed a column name the model does not have.
        query = query.order_by(getattr(self.sql_model, str(order_by)).desc())

    logger.info("Ending Query")
    return [eff_schema.from_orm(row) for row in query.offset(start).limit(limit).all()]
def get_all_limit_columns(self, session: Session, fields: list[str], limit: int = None) -> list[SqlAlchemyBase]:
@ -129,6 +133,9 @@ class BaseDocument:
session.add(new_document)
session.commit()
if hasattr(self, "update_observers"):
self.update_observers()
return self.schema.from_orm(new_document)
def update(self, session: Session, match_value: str, new_data: dict) -> BaseModel:
@ -146,6 +153,9 @@ class BaseDocument:
entry = self._query_one(session=session, match_value=match_value)
entry.update(session=session, **new_data)
if hasattr(self, "update_observers"):
self.update_observers()
session.commit()
return self.schema.from_orm(entry)
@ -169,12 +179,18 @@ class BaseDocument:
session.delete(result)
session.commit()
if hasattr(self, "update_observers"):
self.update_observers()
return results_as_model
def delete_all(self, session: Session) -> None:
session.query(self.sql_model).delete()
session.commit()
if hasattr(self, "update_observers"):
self.update_observers()
def count_all(self, session: Session, match_key=None, match_value=None) -> int:
if None in [match_key, match_value]:
return session.query(self.sql_model).count()

View File

@ -3,14 +3,15 @@ from mealie.db.database import db
from mealie.db.db_setup import generate_session
from mealie.routes.deps import is_logged_in
from mealie.schema.recipe import RecipeSummary
from mealie.services.recipe.all_recipes import get_all_recipes_public, get_all_recipes_user
from slugify import slugify
from sqlalchemy.orm.session import Session
router = APIRouter(tags=["Query All Recipes"])
@router.get("/api/recipes", response_model=list[RecipeSummary])
async def get_recipe_summary(
@router.get("/api/recipes")
def get_recipe_summary(
start=0, limit=9999, session: Session = Depends(generate_session), user: bool = Depends(is_logged_in)
):
"""
@ -26,14 +27,10 @@ async def get_recipe_summary(
"""
if user:
return db.recipes.get_all(
session, limit=limit, start=start, order_by="date_updated", override_schema=RecipeSummary
)
return get_all_recipes_user(limit, start)
else:
return db.recipes.get_all_not_private(
session, limit=limit, start=start, order_by="date_updated", override_schema=RecipeSummary
)
return get_all_recipes_public(limit, start)
@router.get("/api/recipes/summary/untagged", response_model=list[RecipeSummary])

View File

@ -0,0 +1,38 @@
import json
from functools import lru_cache
from fastapi import Response
from fastapi.encoders import jsonable_encoder
from mealie.db.database import db
from mealie.db.db_setup import SessionLocal
from mealie.schema.recipe import RecipeSummary
@lru_cache(maxsize=1)
def get_all_recipes_user(limit, start):
    """Return a cached JSON Response of all recipes for a logged-in user.

    Results are memoized per (limit, start); clear_all_cache() invalidates
    the cache whenever recipe data changes.
    """
    with SessionLocal() as session:
        recipes: list[RecipeSummary] = db.recipes.get_all(
            session, limit=limit, start=start, order_by="date_updated", override_schema=RecipeSummary
        )
        # Serialize once here so cache hits skip pydantic entirely.
        payload = jsonable_encoder([recipe.dict() for recipe in recipes])
        return Response(content=json.dumps(payload), media_type="application/json")
@lru_cache(maxsize=1)
def get_all_recipes_public(limit, start):
    """Return a cached JSON Response of all public (non-private) recipes.

    Results are memoized per (limit, start); clear_all_cache() invalidates
    the cache whenever recipe data changes.
    """
    with SessionLocal() as session:
        recipes: list[RecipeSummary] = db.recipes.get_all_public(
            session, limit=limit, start=start, order_by="date_updated", override_schema=RecipeSummary
        )
        # Serialize once here so cache hits skip pydantic entirely.
        payload = jsonable_encoder([recipe.dict() for recipe in recipes])
        return Response(content=json.dumps(payload), media_type="application/json")
def clear_all_cache():
    """Drop both cached recipe listings so the next request re-queries the DB.

    Registered as an observer on the recipe store; runs after every write.
    """
    # Leftover debug print removed; cache invalidation is the only effect.
    get_all_recipes_user.cache_clear()
    get_all_recipes_public.cache_clear()
def subscripte_to_recipe_events():
    # NOTE(review): the name is misspelled ("subscripte"), but app startup
    # imports it under this spelling — renaming requires updating those
    # imports in the same change.
    # Registers cache invalidation to run whenever the recipe table changes.
    db.recipes.subscribe(clear_all_cache)