fix: various alembic migration issues with queries (#2773)

* set expire_on_commit false to avoid refresh

* converted deletes to raw SQL statements

* call update statements directly in sql

* parameterized text queries

* replace orm with raw sql to avoid db differences
Michael Genson 2023-12-06 12:37:19 -06:00 committed by GitHub
parent 1d1d61df77
commit 310069a7e9
2 changed files with 41 additions and 55 deletions
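
The change boils down to one pattern, repeated across both migration files: run row updates and deletes through parameterized sa.text() statements instead of mutating ORM objects, and reuse a single session (created with expire_on_commit=False) for the whole migration. Below is a minimal, illustrative sketch of that pattern only; the example_table name, its columns, and the literal values are placeholders, not part of this commit.

import sqlalchemy as sa
from sqlalchemy.orm import Session

from alembic import op


def upgrade():
    bind = op.get_bind()
    # expire_on_commit=False keeps already-loaded objects usable after commit()
    # without SQLAlchemy issuing refresh queries against them.
    session = Session(bind=bind, expire_on_commit=False)

    # Parameterized raw SQL behaves the same on SQLite and PostgreSQL,
    # and bindparams() keeps values out of the SQL string itself.
    session.execute(
        sa.text("UPDATE example_table SET name_normalized=:name_normalized WHERE id=:id").bindparams(
            name_normalized="tomato", id="11111111-1111-1111-1111-111111111111"
        )
    )
    session.execute(
        sa.text("DELETE FROM example_table WHERE id=:id").bindparams(id="22222222-2222-2222-2222-222222222222")
    )
    session.commit()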


@@ -33,21 +33,29 @@ def populate_normalized_fields():
     )
     for unit in units:
         if unit.name is not None:
-            unit.name_normalized = IngredientUnitModel.normalize(unit.name)
+            session.execute(
+                sa.text(
+                    f"UPDATE {IngredientUnitModel.__tablename__} SET name_normalized=:name_normalized WHERE id=:id"
+                ).bindparams(name_normalized=IngredientUnitModel.normalize(unit.name), id=unit.id)
+            )
         if unit.abbreviation is not None:
-            unit.abbreviation_normalized = IngredientUnitModel.normalize(unit.abbreviation)
-
-        session.add(unit)
+            session.execute(
+                sa.text(
+                    f"UPDATE {IngredientUnitModel.__tablename__} SET abbreviation_normalized=:abbreviation_normalized WHERE id=:id"
+                ).bindparams(abbreviation_normalized=IngredientUnitModel.normalize(unit.abbreviation), id=unit.id)
+            )
 
     foods = (
         session.execute(select(IngredientFoodModel).options(orm.load_only(IngredientFoodModel.name))).scalars().all()
     )
     for food in foods:
         if food.name is not None:
-            food.name_normalized = IngredientFoodModel.normalize(food.name)
-
-        session.add(food)
+            session.execute(
+                sa.text(
+                    f"UPDATE {IngredientFoodModel.__tablename__} SET name_normalized=:name_normalized WHERE id=:id"
+                ).bindparams(name_normalized=IngredientFoodModel.normalize(food.name), id=food.id)
+            )
 
     session.commit()


@@ -13,10 +13,8 @@ import sqlalchemy as sa
 from pydantic import UUID4
 from sqlalchemy.orm import Session, load_only
 
-import mealie.db.migration_types
 from alembic import op
 from mealie.db.models._model_base import SqlAlchemyBase
-from mealie.db.models._model_utils.guid import GUID
 from mealie.db.models.group.shopping_list import ShoppingListItem
 from mealie.db.models.labels import MultiPurposeLabel
 from mealie.db.models.recipe.ingredient import IngredientFoodModel, IngredientUnitModel, RecipeIngredientModel
@@ -43,26 +41,25 @@ def _is_postgres():
     return op.get_context().dialect.name == "postgresql"
 
 
-def _get_duplicates(session: Session, model: SqlAlchemyBase) -> defaultdict[str, list[str]]:
-    duplicate_map: defaultdict[str, list[str]] = defaultdict(list)
-    for obj in session.query(model).options(load_only(model.id, model.group_id, model.name)).all():
-        key = f"{obj.group_id}$${obj.name}"
-        duplicate_map[key].append(str(obj.id))
+def _get_duplicates(session: Session, model: SqlAlchemyBase) -> defaultdict[str, list]:
+    duplicate_map: defaultdict[str, list] = defaultdict(list)
+
+    query = session.execute(sa.text(f"SELECT id, group_id, name FROM {model.__tablename__}"))
+    for row in query.all():
+        id, group_id, name = row
+        key = f"{group_id}$${name}"
+        duplicate_map[key].append(id)
 
     return duplicate_map
 
 
 def _resolve_duplicate_food(
     session: Session,
-    keep_food: IngredientFoodModel,
     keep_food_id: UUID4,
     dupe_food_id: UUID4,
 ):
     for shopping_list_item in session.query(ShoppingListItem).filter_by(food_id=dupe_food_id).all():
         shopping_list_item.food_id = keep_food_id
-        shopping_list_item.food = keep_food
-
-    session.commit()
 
     for recipe_ingredient in (
         session.query(RecipeIngredientModel)
@@ -71,62 +68,43 @@ def _resolve_duplicate_food(
         .all()
     ):
         recipe_ingredient.food_id = keep_food_id
-        recipe_ingredient.food = keep_food
 
-    session.commit()
-
-    session.query(IngredientFoodModel).options(load_only(IngredientFoodModel.id)).filter_by(id=dupe_food_id).delete()
-    session.commit()
+    session.execute(
+        sa.text(f"DELETE FROM {IngredientFoodModel.__tablename__} WHERE id=:id").bindparams(id=dupe_food_id)
+    )
 
 
 def _resolve_duplicate_unit(
     session: Session,
-    keep_unit: IngredientUnitModel,
     keep_unit_id: UUID4,
     dupe_unit_id: UUID4,
 ):
     for shopping_list_item in session.query(ShoppingListItem).filter_by(unit_id=dupe_unit_id).all():
         shopping_list_item.unit_id = keep_unit_id
-        shopping_list_item.unit = keep_unit
-
-    session.commit()
 
     for recipe_ingredient in session.query(RecipeIngredientModel).filter_by(unit_id=dupe_unit_id).all():
         recipe_ingredient.unit_id = keep_unit_id
-        recipe_ingredient.unit = keep_unit
 
-    session.commit()
-
-    session.query(IngredientUnitModel).options(load_only(IngredientUnitModel.id)).filter_by(id=dupe_unit_id).delete()
-    session.commit()
+    session.execute(
+        sa.text(f"DELETE FROM {IngredientUnitModel.__tablename__} WHERE id=:id").bindparams(id=dupe_unit_id)
+    )
 
 
 def _resolve_duplicate_label(
     session: Session,
-    keep_label: MultiPurposeLabel,
     keep_label_id: UUID4,
     dupe_label_id: UUID4,
 ):
     for shopping_list_item in session.query(ShoppingListItem).filter_by(label_id=dupe_label_id).all():
         shopping_list_item.label_id = keep_label_id
-        shopping_list_item.label = keep_label
-
-    session.commit()
 
     for ingredient_food in session.query(IngredientFoodModel).filter_by(label_id=dupe_label_id).all():
         ingredient_food.label_id = keep_label_id
-        ingredient_food.label = keep_label
 
-    session.commit()
-
-    session.query(MultiPurposeLabel).options(load_only(MultiPurposeLabel.id)).filter_by(id=dupe_label_id).delete()
-    session.commit()
+    session.execute(sa.text(f"DELETE FROM {MultiPurposeLabel.__tablename__} WHERE id=:id").bindparams(id=dupe_label_id))
 
 
-def _resolve_duplicate_foods_units_labels():
-    bind = op.get_bind()
-    session = Session(bind=bind)
-
+def _resolve_duplicate_foods_units_labels(session: Session):
     for model, resolve_func in [
         (IngredientFoodModel, _resolve_duplicate_food),
         (IngredientUnitModel, _resolve_duplicate_unit),
@@ -138,9 +116,8 @@ def _resolve_duplicate_foods_units_labels():
             continue
 
         keep_id = ids[0]
-        keep_obj = session.query(model).options(load_only(model.id)).filter_by(id=keep_id).first()
         for dupe_id in ids[1:]:
-            resolve_func(session, keep_obj, keep_id, dupe_id)
+            resolve_func(session, keep_id, dupe_id)
 
 
 def _remove_duplicates_from_m2m_table(session: Session, table_meta: TableMeta):
@@ -163,20 +140,20 @@ def _remove_duplicates_from_m2m_table(session: Session, table_meta: TableMeta):
     )
 
     session.execute(query)
-    session.commit()
 
 
-def _remove_duplicates_from_m2m_tables(table_metas: list[TableMeta]):
-    bind = op.get_bind()
-    session = Session(bind=bind)
-
+def _remove_duplicates_from_m2m_tables(session: Session, table_metas: list[TableMeta]):
     for table_meta in table_metas:
         _remove_duplicates_from_m2m_table(session, table_meta)
 
 
 def upgrade():
-    _resolve_duplicate_foods_units_labels()
+    bind = op.get_bind()
+    session = Session(bind=bind)
+
+    _resolve_duplicate_foods_units_labels(session)
     _remove_duplicates_from_m2m_tables(
+        session,
         [
             TableMeta("cookbooks_to_categories", "cookbook_id", "category_id"),
             TableMeta("cookbooks_to_tags", "cookbook_id", "tag_id"),
@@ -189,12 +166,13 @@ def upgrade():
             TableMeta("recipes_to_tools", "recipe_id", "tool_id"),
             TableMeta("users_to_favorites", "user_id", "recipe_id"),
             TableMeta("shopping_lists_multi_purpose_labels", "shopping_list_id", "label_id"),
-        ]
+        ],
     )
+    session.commit()
 
 
     # ### commands auto generated by Alembic - please adjust! ###
     # we use batch_alter_table here because otherwise this fails on sqlite
     # M2M
     with op.batch_alter_table("cookbooks_to_categories") as batch_op:
         batch_op.create_unique_constraint("cookbook_id_category_id_key", ["cookbook_id", "category_id"])