diff --git a/mealie/core/dependencies/dependencies.py b/mealie/core/dependencies/dependencies.py
index 8477f0cdc3fd..76b5a6614aad 100644
--- a/mealie/core/dependencies/dependencies.py
+++ b/mealie/core/dependencies/dependencies.py
@@ -78,6 +78,9 @@ async def get_current_user(token: str = Depends(oauth2_scheme), session=Depends(
 
     user = repos.users.get_one(token_data.user_id, "id", any_case=False)
 
+    # If we don't commit here, lazy-loads from user relationships can leave table locks behind in postgres,
+    # which can cause quite a bit of pain further down the line
+    session.commit()
     if user is None:
         raise credentials_exception
     return user
diff --git a/mealie/schema/user/user.py b/mealie/schema/user/user.py
index 15dbb6395386..7d1d2d0e15e1 100644
--- a/mealie/schema/user/user.py
+++ b/mealie/schema/user/user.py
@@ -78,14 +78,19 @@ class UserBase(MealieModel):
     can_organize: bool = False
 
     class Config:
-        orm_mode = True
+        class _UserGetter(GetterDict):
+            def get(self, key: Any, default: Any = None) -> Any:
+                # Resolve the group relationship to the group's name
+                if key == "group":
+                    value = super().get(key, default)
+                    return value.name
 
-        @classmethod
-        def getter_dict(cls, name_orm: User):
-            return {
-                **GetterDict(name_orm),
-                "group": name_orm.group.name,
-            }
+                # Keep all other fields as they are
+                else:
+                    return super().get(key, default)
+
+        orm_mode = True
+        getter_dict = _UserGetter
 
         schema_extra = {
             "example": {
diff --git a/mealie/services/backups_v2/alchemy_exporter.py b/mealie/services/backups_v2/alchemy_exporter.py
index 58d8cd35b0f6..eba567cccb3d 100644
--- a/mealie/services/backups_v2/alchemy_exporter.py
+++ b/mealie/services/backups_v2/alchemy_exporter.py
@@ -1,15 +1,19 @@
 import datetime
-import json
+from os import path
 from pathlib import Path
 
 from fastapi.encoders import jsonable_encoder
 from pydantic import BaseModel
-from sqlalchemy import MetaData, create_engine, insert, text
+from sqlalchemy import ForeignKeyConstraint, MetaData, create_engine, insert, text
 from sqlalchemy.engine import base
 from sqlalchemy.orm import sessionmaker
+from alembic import command
+from alembic.config import Config
 
 from mealie.services._base_service import BaseService
 
+PROJECT_DIR = Path(__file__).parent.parent.parent.parent
+
 
 class AlchemyExporter(BaseService):
     connection_str: str
@@ -53,33 +57,6 @@ class AlchemyExporter(BaseService):
             data[key] = AlchemyExporter.DateTimeParser(time=value).time
         return data
 
-    @staticmethod
-    def _compare_schemas(schema1: dict, schema2: dict) -> bool:
-        try:
-            # validate alembic version(s) are the same
-            return schema1["alembic_version"] == schema2["alembic_version"]
-        except KeyError:
-            return False
-
-    @staticmethod
-    def validate_schemas(schema1: Path | dict, schema2: Path | dict) -> bool:
-        """
-        Validates that the schema of the database matches the schema of the database. In practice,
-        this means validating that the alembic version is the same
-        """
-
-        def extract_json(file: Path) -> dict:
-            with open(file) as f:
-                return json.loads(f.read())
-
-        if isinstance(schema1, Path):
-            schema1 = extract_json(schema1)
-
-        if isinstance(schema2, Path):
-            schema2 = extract_json(schema2)
-
-        return AlchemyExporter._compare_schemas(schema1, schema2)
-
     def dump_schema(self) -> dict:
         """
         Returns the schema of the SQLAlchemy database as a python dictionary. This dictionary is wrapped by
@@ -115,6 +92,17 @@ class AlchemyExporter(BaseService):
         return jsonable_encoder(result)
 
     def restore(self, db_dump: dict) -> None:
+        # set up alembic to run migrations up to the version of the backup
+        alembic_data = db_dump["alembic_version"]
+        alembic_version = alembic_data[0]["version_num"]
+
+        alembic_cfg = Config(str(PROJECT_DIR / "alembic.ini"))
+        # alembic's file resolver wants to use the "mealie" subdirectory when called from within the server package
+        # Just override this to use the correct migrations path
+        alembic_cfg.set_main_option("script_location", path.join(PROJECT_DIR, "alembic"))
+        command.upgrade(alembic_cfg, alembic_version)
+
+        del db_dump["alembic_version"]
         """Restores all data from dictionary into the database"""
         with self.engine.begin() as connection:
             data = AlchemyExporter.convert_to_datetime(db_dump)
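Note on the restore() hunk above: the deleted validate_schemas guard is replaced by letting alembic reconcile versions. restore() now migrates the empty schema up to the revision recorded in the backup, bulk-inserts the dumped rows, and then migrates the rest of the way with the command.upgrade(alembic_cfg, "head") call in the next hunk. A minimal standalone sketch of the same programmatic alembic pattern; the path and revision id below are illustrative, not taken from this diff:

    from pathlib import Path

    from alembic import command
    from alembic.config import Config

    project_dir = Path("/opt/mealie")  # hypothetical install location

    cfg = Config(str(project_dir / "alembic.ini"))
    # Pin the migrations directory explicitly so alembic's script resolver
    # does not depend on the current working directory.
    cfg.set_main_option("script_location", str(project_dir / "alembic"))

    command.upgrade(cfg, "167eb69066ad")  # revision stored in the backup (made up)
    # ... bulk-insert the dumped rows here ...
    command.upgrade(cfg, "head")  # finish migrating to the current schema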
@@ -123,8 +111,8 @@ class AlchemyExporter(BaseService):
             for table_name, rows in data.items():
                 if not rows:
                     continue
-
                 table = self.meta.tables[table_name]
+                connection.execute(table.delete())
                 connection.execute(insert(table), rows)
 
             if self.engine.dialect.name == "postgresql":
@@ -151,19 +139,45 @@ SELECT SETVAL('shopping_list_item_extras_id_seq', (SELECT MAX(id) FROM shopping_
                     )
                 )
 
+        # Run all migrations up to current version
+        command.upgrade(alembic_cfg, "head")
+
     def drop_all(self) -> None:
         """Drops all data from the database"""
-        self.meta.reflect(bind=self.engine)
-        with self.session_maker() as session:
-            is_postgres = self.settings.DB_ENGINE == "postgres"
+        from sqlalchemy.engine.reflection import Inspector
+        from sqlalchemy.schema import DropConstraint, DropTable, MetaData, Table
 
-            try:
-                if is_postgres:
-                    session.execute(text("SET session_replication_role = 'replica'"))
+        with self.engine.begin() as connection:
+            inspector = Inspector.from_engine(self.engine)
 
-                for table in self.meta.sorted_tables:
-                    session.execute(text(f"DELETE FROM {table.name}"))
-            finally:
-                if is_postgres:
-                    session.execute(text("SET session_replication_role = 'origin'"))
-                session.commit()
+            # We need to re-create a minimal metadata with only the required things to
+            # successfully emit drop constraints and tables commands for postgres (based
+            # on the actual schema of the running instance)
+            meta = MetaData()
+            tables = []
+            all_fkeys = []
+            for table_name in inspector.get_table_names():
+                fkeys = []
+
+                for fkey in inspector.get_foreign_keys(table_name):
+                    if not fkey["name"]:
+                        continue
+
+                    fkeys.append(ForeignKeyConstraint((), (), name=fkey["name"]))
+
+                tables.append(Table(table_name, meta, *fkeys))
+                all_fkeys.extend(fkeys)
+
+            if self.engine.dialect.name == "postgresql":
+                # Only pg needs foreign key dropping
+                for fkey in all_fkeys:
+                    connection.execute(DropConstraint(fkey))
+
+                for table in tables:
+                    connection.execute(DropTable(table))
+                # I have no idea how to drop all custom types with sqlalchemy
+                # Since we only have one, this will have to do for now
+                connection.execute(text("DROP TYPE authmethod"))
+            else:
+                for table in tables:
+                    connection.execute(DropTable(table))
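Note on the DROP TYPE workaround at the end of drop_all(): SQLAlchemy can emit the same drop through the postgresql ENUM schema type, which avoids hand-written SQL and tolerates a missing type. A sketch under the assumption (stated in the diff's own comment) that authmethod is the only custom type in the schema:

    from sqlalchemy.dialects.postgresql import ENUM
    from sqlalchemy.engine import Connection

    def drop_named_enums(connection: Connection, *names: str) -> None:
        for name in names:
            # ENUM(name=...) references the existing type by name only;
            # checkfirst=True skips the DROP TYPE when the type is absent.
            ENUM(name=name).drop(connection, checkfirst=True)

    # e.g. instead of connection.execute(text("DROP TYPE authmethod")):
    # drop_named_enums(connection, "authmethod")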
diff --git a/mealie/services/backups_v2/backup_v2.py b/mealie/services/backups_v2/backup_v2.py
index 1b9885c5d0d4..d2c1d87fd4fc 100644
--- a/mealie/services/backups_v2/backup_v2.py
+++ b/mealie/services/backups_v2/backup_v2.py
@@ -89,10 +89,6 @@ class BackupV2(BaseService):
 
         database_json = contents.read_tables()
 
-        if not AlchemyExporter.validate_schemas(database_json, self.db_exporter.dump()):
-            self.logger.error("Invalid backup file. Database schemas do not match")
-            raise BackupSchemaMismatch("Invalid backup file. Database schemas do not match")
-
         # ================================
         # Purge Database
 
diff --git a/tests/unit_tests/services_tests/backup_v2_tests/test_alchemy_exporter.py b/tests/unit_tests/services_tests/backup_v2_tests/test_alchemy_exporter.py
index 2d958daca9bf..b1bbb4a380aa 100644
--- a/tests/unit_tests/services_tests/backup_v2_tests/test_alchemy_exporter.py
+++ b/tests/unit_tests/services_tests/backup_v2_tests/test_alchemy_exporter.py
@@ -12,40 +12,3 @@ def test_alchemy_exporter():
 
     assert data["alembic_version"] == alembic_versions()
     assert json.dumps(data, indent=4)  # Make sure data is json-serializable
-
-
-def test_validate_schemas():
-    schema = {
-        "alembic_version": alembic_versions(),
-    }
-    match = {
-        "alembic_version": alembic_versions(),
-    }
-
-    invalid_version = {
-        "alembic_version": [{"version_num": "not-valid-schema"}],
-    }
-
-    assert AlchemyExporter.validate_schemas(schema, match)
-    assert not AlchemyExporter.validate_schemas(schema, invalid_version)
-
-    schema_with_tables = {
-        "alembic_version": alembic_versions(),
-        "recipes": [
-            {
-                "id": 1,
-            }
-        ],
-    }
-    match_with_tables = {
-        "alembic_version": alembic_versions(),
-        "recipes": [
-            {
-                "id": 2,
-            }
-        ],
-    }
-
-    assert AlchemyExporter.validate_schemas(schema_with_tables, match_with_tables)
-    assert AlchemyExporter.validate_schemas(schema_with_tables, match_with_tables)
-    assert AlchemyExporter.validate_schemas(schema_with_tables, match_with_tables)
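Note on the removed BackupSchemaMismatch guard and its tests: a backup taken at an unknown (e.g. newer) revision now fails inside command.upgrade rather than being rejected up front, and by that point backup_v2.py has already purged the database. If a fail-fast check were still wanted, alembic's ScriptDirectory could validate the revision before any destructive work; a sketch, not part of this diff:

    from alembic.config import Config
    from alembic.script import ScriptDirectory
    from alembic.util import CommandError

    def backup_revision_is_known(cfg: Config, revision: str) -> bool:
        # ScriptDirectory indexes the migration files referenced by the given
        # alembic config; get_revision() raises CommandError for an id that is
        # missing from the migration history.
        script = ScriptDirectory.from_config(cfg)
        try:
            script.get_revision(revision)
            return True
        except CommandError:
            return False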