Mirror of https://github.com/mealie-recipes/mealie.git, synced 2025-06-02 21:25:45 -04:00
feat: implement backup restoration from old db schemas (#2213)
* Remove some implicit lazy-loads from user serialization
* Implement full backup restore across different database versions
This commit is contained in:
parent 3118b0e423
commit ccb0b43cef
@@ -78,6 +78,9 @@ async def get_current_user(token: str = Depends(oauth2_scheme), session=Depends(
     user = repos.users.get_one(token_data.user_id, "id", any_case=False)
 
+    # If we don't commit here, lazy-loads from user relationships will leave some table lock in postgres
+    # which can cause quite a bit of pain further down the line
+    session.commit()
     if user is None:
         raise credentials_exception
     return user
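The added session.commit() is the "implicit lazy-loads" fix from the commit message: any relationship access after this point would otherwise run inside the still-open read transaction. A minimal standalone sketch of that failure mode, with illustrative models and a sqlite engine rather than Mealie's actual schema:

# Sketch only: model names are illustrative, not Mealie's schema.
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()


class Group(Base):
    __tablename__ = "groups"
    id = Column(Integer, primary_key=True)
    name = Column(String)


class User(Base):
    __tablename__ = "users"
    id = Column(Integer, primary_key=True)
    group_id = Column(Integer, ForeignKey("groups.id"))
    group = relationship(Group, lazy="select")  # loaded on first attribute access


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(User(group=Group(name="home")))
    session.commit()

    user = session.query(User).first()
    # Touching user.group here emits a second SELECT inside the still-open
    # transaction; on postgres that keeps the transaction (and its locks)
    # alive, which is what the early session.commit() above avoids.
    print(user.group.name)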
@@ -78,14 +78,19 @@ class UserBase(MealieModel):
     can_organize: bool = False
 
     class Config:
-        orm_mode = True
+        class _UserGetter(GetterDict):
+            def get(self, key: Any, default: Any = None) -> Any:
+                # Transform extras into key-value dict
+                if key == "group":
+                    value = super().get(key, default)
+                    return value.group.name
+
+                # Keep all other fields as they are
+                else:
+                    return super().get(key, default)
 
-        @classmethod
-        def getter_dict(cls, name_orm: User):
-            return {
-                **GetterDict(name_orm),
-                "group": name_orm.group.name,
-            }
+        orm_mode = True
+        getter_dict = _UserGetter
 
         schema_extra = {
             "example": {
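For reference on the pydantic side: with orm_mode, pydantic reads ORM attributes through a GetterDict, and Config.getter_dict is the hook this commit uses to flatten the group relationship instead of a per-call classmethod. A self-contained sketch of the same pattern (pydantic v1 API; the ORM stand-ins below are plain classes, not Mealie models):

from typing import Any

from pydantic import BaseModel
from pydantic.utils import GetterDict


class OrmGroup:  # stand-in for a SQLAlchemy Group row
    name = "home"


class OrmUser:  # stand-in for a SQLAlchemy User row
    username = "demo"
    group = OrmGroup()


class _UserGetter(GetterDict):
    def get(self, key: Any, default: Any = None) -> Any:
        # Flatten the related group object down to its name
        if key == "group":
            return super().get(key, default).name
        return super().get(key, default)


class UserOut(BaseModel):
    username: str
    group: str

    class Config:
        orm_mode = True
        getter_dict = _UserGetter


print(UserOut.from_orm(OrmUser()))  # username='demo' group='home'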
@@ -1,15 +1,19 @@
 import datetime
-import json
+from os import path
 from pathlib import Path
 
 from fastapi.encoders import jsonable_encoder
 from pydantic import BaseModel
-from sqlalchemy import MetaData, create_engine, insert, text
+from sqlalchemy import ForeignKeyConstraint, MetaData, create_engine, insert, text
 from sqlalchemy.engine import base
 from sqlalchemy.orm import sessionmaker
 
+from alembic import command
+from alembic.config import Config
 from mealie.services._base_service import BaseService
 
+PROJECT_DIR = Path(__file__).parent.parent.parent.parent
+
 
 class AlchemyExporter(BaseService):
     connection_str: str
@@ -53,33 +57,6 @@ class AlchemyExporter(BaseService):
                 data[key] = AlchemyExporter.DateTimeParser(time=value).time
         return data
 
-    @staticmethod
-    def _compare_schemas(schema1: dict, schema2: dict) -> bool:
-        try:
-            # validate alembic version(s) are the same
-            return schema1["alembic_version"] == schema2["alembic_version"]
-        except KeyError:
-            return False
-
-    @staticmethod
-    def validate_schemas(schema1: Path | dict, schema2: Path | dict) -> bool:
-        """
-        Validates that the schema of the database matches the schema of the database. In practice,
-        this means validating that the alembic version is the same
-        """
-
-        def extract_json(file: Path) -> dict:
-            with open(file) as f:
-                return json.loads(f.read())
-
-        if isinstance(schema1, Path):
-            schema1 = extract_json(schema1)
-
-        if isinstance(schema2, Path):
-            schema2 = extract_json(schema2)
-
-        return AlchemyExporter._compare_schemas(schema1, schema2)
-
     def dump_schema(self) -> dict:
         """
         Returns the schema of the SQLAlchemy database as a python dictionary. This dictionary is wrapped by
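The deleted guard is what used to make cross-version restores impossible: two dumps only "matched" when their alembic_version tables were equal. The removed check, isolated for clarity (the revision strings here are made up):

def _compare_schemas(schema1: dict, schema2: dict) -> bool:
    try:
        # validate alembic version(s) are the same
        return schema1["alembic_version"] == schema2["alembic_version"]
    except KeyError:
        return False


old_dump = {"alembic_version": [{"version_num": "aaaa00000000"}]}  # made-up revision
live_db = {"alembic_version": [{"version_num": "bbbb11111111"}]}   # made-up revision

print(_compare_schemas(old_dump, old_dump))  # True
print(_compare_schemas(old_dump, live_db))   # False: this used to abort the restore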
@@ -115,6 +92,17 @@ class AlchemyExporter(BaseService):
         return jsonable_encoder(result)
 
     def restore(self, db_dump: dict) -> None:
+        # setup alembic to run migrations up the version of the backup
+        alembic_data = db_dump["alembic_version"]
+        alembic_version = alembic_data[0]["version_num"]
+
+        alembic_cfg = Config(str(PROJECT_DIR / "alembic.ini"))
+        # alembic's file resolver wants to use the "mealie" subdirectory when called from within the server package
+        # Just override this to use the correct migrations path
+        alembic_cfg.set_main_option("script_location", path.join(PROJECT_DIR, "alembic"))
+        command.upgrade(alembic_cfg, alembic_version)
+
+        del db_dump["alembic_version"]
         """Restores all data from dictionary into the database"""
         with self.engine.begin() as connection:
             data = AlchemyExporter.convert_to_datetime(db_dump)
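This is the heart of the feature: instead of refusing mismatched dumps, restore() now migrates the empty database to exactly the revision recorded in the backup before inserting rows, and a second upgrade to "head" (further down in this file) brings the restored data up to the current schema. A sketch of the programmatic alembic calls in isolation, with an assumed project path and a made-up revision:

from pathlib import Path

from alembic import command
from alembic.config import Config

PROJECT_DIR = Path("/opt/mealie")  # assumption for illustration

db_dump = {"alembic_version": [{"version_num": "aaaa00000000"}]}  # made-up revision

alembic_cfg = Config(str(PROJECT_DIR / "alembic.ini"))
# The ini's relative script_location resolves against the working directory,
# so point it at the migrations folder explicitly, as the diff does.
alembic_cfg.set_main_option("script_location", str(PROJECT_DIR / "alembic"))

# 1) Bring the schema to the dump's revision so the dumped rows fit exactly
command.upgrade(alembic_cfg, db_dump["alembic_version"][0]["version_num"])

# ... bulk-insert the dumped rows here ...

# 2) Then run the remaining migrations so the restored data ends up current
command.upgrade(alembic_cfg, "head")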
@@ -123,8 +111,8 @@
             for table_name, rows in data.items():
                 if not rows:
                     continue
+
                 table = self.meta.tables[table_name]
-
                 connection.execute(table.delete())
                 connection.execute(insert(table), rows)
         if self.engine.dialect.name == "postgresql":
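connection.execute(insert(table), rows) is SQLAlchemy's executemany form: one INSERT statement, bound once per row dict. The delete-then-insert pattern in miniature, against a throwaway in-memory SQLite table:

from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine, insert

engine = create_engine("sqlite://")
meta = MetaData()
recipes = Table(
    "recipes",
    meta,
    Column("id", Integer, primary_key=True),
    Column("name", String),
)
meta.create_all(engine)

rows = [{"id": 1, "name": "soup"}, {"id": 2, "name": "bread"}]

with engine.begin() as connection:  # one transaction, as in restore()
    connection.execute(recipes.delete())       # purge whatever is there
    connection.execute(insert(recipes), rows)  # executemany over the row dicts
    print(connection.execute(recipes.select()).all())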
@@ -151,19 +139,45 @@ SELECT SETVAL('shopping_list_item_extras_id_seq', (SELECT MAX(id) FROM shopping_
                 )
             )
 
+        # Run all migrations up to current version
+        command.upgrade(alembic_cfg, "head")
+
     def drop_all(self) -> None:
         """Drops all data from the database"""
-        self.meta.reflect(bind=self.engine)
-        with self.session_maker() as session:
-            is_postgres = self.settings.DB_ENGINE == "postgres"
-
-            try:
-                if is_postgres:
-                    session.execute(text("SET session_replication_role = 'replica'"))
-
-                for table in self.meta.sorted_tables:
-                    session.execute(text(f"DELETE FROM {table.name}"))
-            finally:
-                if is_postgres:
-                    session.execute(text("SET session_replication_role = 'origin'"))
-                session.commit()
+        from sqlalchemy.engine.reflection import Inspector
+        from sqlalchemy.schema import DropConstraint, DropTable, MetaData, Table
+
+        with self.engine.begin() as connection:
+            inspector = Inspector.from_engine(self.engine)
+
+            # We need to re-create a minimal metadata with only the required things to
+            # successfully emit drop constraints and tables commands for postgres (based
+            # on the actual schema of the running instance)
+            meta = MetaData()
+            tables = []
+            all_fkeys = []
+
+            for table_name in inspector.get_table_names():
+                fkeys = []
+
+                for fkey in inspector.get_foreign_keys(table_name):
+                    if not fkey["name"]:
+                        continue
+
+                    fkeys.append(ForeignKeyConstraint((), (), name=fkey["name"]))
+
+                tables.append(Table(table_name, meta, *fkeys))
+                all_fkeys.extend(fkeys)
+
+            if self.engine.dialect.name == "postgresql":
+                # Only pg needs foreign key dropping
+                for fkey in all_fkeys:
+                    connection.execute(DropConstraint(fkey))
+
+                for table in tables:
+                    connection.execute(DropTable(table))
+
+                # I have no idea how to drop all custom types with sqlalchemy
+                # Since we only have one, this will have to do for now
+                connection.execute(text("DROP TYPE authmethod"))
+            else:
+                for table in tables:
+                    connection.execute(DropTable(table))
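The rewritten drop_all trades per-table DELETE FROM (which left the schema in place and needed postgres' replication-role trick to dodge FK ordering) for a real teardown: reflect the live schema into a minimal MetaData, drop named FK constraints first on postgres, then drop the tables. The same reflection recipe as a runnable sketch against SQLite, where the postgres-only branch is simply skipped:

from sqlalchemy import (
    Column,
    ForeignKey,
    ForeignKeyConstraint,
    Integer,
    MetaData,
    Table,
    create_engine,
    inspect,
)
from sqlalchemy.schema import DropConstraint, DropTable

engine = create_engine("sqlite://")
meta = MetaData()
groups = Table("groups", meta, Column("id", Integer, primary_key=True))
users = Table(
    "users",
    meta,
    Column("id", Integer, primary_key=True),
    Column("group_id", ForeignKey("groups.id")),
)
meta.create_all(engine)

with engine.begin() as connection:
    inspector = inspect(engine)  # same idea as Inspector.from_engine()

    # Minimal metadata: table names plus their *named* FK constraints only
    drop_meta = MetaData()
    tables, all_fkeys = [], []
    for table_name in inspector.get_table_names():
        fkeys = [
            ForeignKeyConstraint((), (), name=fk["name"])
            for fk in inspector.get_foreign_keys(table_name)
            if fk["name"]
        ]
        tables.append(Table(table_name, drop_meta, *fkeys))
        all_fkeys.extend(fkeys)

    if engine.dialect.name == "postgresql":
        # Break FK dependencies first so table drop order stops mattering
        for fkey in all_fkeys:
            connection.execute(DropConstraint(fkey))

    for table in tables:
        connection.execute(DropTable(table))

On postgres the diff additionally drops the authmethod enum type by hand, since (as its own comment admits) SQLAlchemy has no generic way to drop all custom types.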
@@ -89,10 +89,6 @@ class BackupV2(BaseService):
 
         database_json = contents.read_tables()
 
-        if not AlchemyExporter.validate_schemas(database_json, self.db_exporter.dump()):
-            self.logger.error("Invalid backup file. Database schemas do not match")
-            raise BackupSchemaMismatch("Invalid backup file. Database schemas do not match")
-
         # ================================
         # Purge Database
 
@@ -12,40 +12,3 @@ def test_alchemy_exporter():
 
     assert data["alembic_version"] == alembic_versions()
     assert json.dumps(data, indent=4)  # Make sure data is json-serializable
-
-
-def test_validate_schemas():
-    schema = {
-        "alembic_version": alembic_versions(),
-    }
-    match = {
-        "alembic_version": alembic_versions(),
-    }
-
-    invalid_version = {
-        "alembic_version": [{"version_num": "not-valid-schema"}],
-    }
-
-    assert AlchemyExporter.validate_schemas(schema, match)
-    assert not AlchemyExporter.validate_schemas(schema, invalid_version)
-
-    schema_with_tables = {
-        "alembic_version": alembic_versions(),
-        "recipes": [
-            {
-                "id": 1,
-            }
-        ],
-    }
-    match_with_tables = {
-        "alembic_version": alembic_versions(),
-        "recipes": [
-            {
-                "id": 2,
-            }
-        ],
-    }
-
-    assert AlchemyExporter.validate_schemas(schema_with_tables, match_with_tables)
-    assert AlchemyExporter.validate_schemas(schema_with_tables, match_with_tables)
-    assert AlchemyExporter.validate_schemas(schema_with_tables, match_with_tables)