fix: migrate error on duplicate foods/units/labels (#2637)

Co-authored-by: Hayden <64056131+hay-kot@users.noreply.github.com>
This commit is contained in:
Michael Genson 2023-10-15 20:51:29 -05:00 committed by GitHub
parent 4b04137a04
commit df3b10b5c2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@ -5,6 +5,7 @@ Revises: 0341b154f79a
Create Date: 2023-10-04 14:29:26.688065
"""
from collections import defaultdict
from dataclasses import dataclass from dataclasses import dataclass
from typing import Any from typing import Any
@ -14,6 +15,10 @@ from sqlalchemy.orm import Session
import mealie.db.migration_types import mealie.db.migration_types
from alembic import op from alembic import op
from mealie.db.models._model_base import SqlAlchemyBase from mealie.db.models._model_base import SqlAlchemyBase
from mealie.db.models._model_utils.guid import GUID
from mealie.db.models.group.shopping_list import ShoppingListItem
from mealie.db.models.labels import MultiPurposeLabel
from mealie.db.models.recipe.ingredient import IngredientFoodModel, IngredientUnitModel, RecipeIngredientModel
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = "dded3119c1fe" revision = "dded3119c1fe"
@ -37,6 +42,87 @@ def _is_postgres():
return op.get_context().dialect.name == "postgresql" return op.get_context().dialect.name == "postgresql"
def _get_duplicates(session: Session, model: SqlAlchemyBase) -> defaultdict[str, list[str]]:
duplicate_map: defaultdict[str, list[str]] = defaultdict(list)
for obj in session.query(model).all():
key = f"{obj.group_id}$${obj.name}"
duplicate_map[key].append(str(obj.id))
return duplicate_map
def _resolve_duplicate_food(session: Session, keep_food: IngredientFoodModel, dupe_food: IngredientFoodModel):
    """Re-point every reference to *dupe_food* at *keep_food*, then delete the duplicate."""

    # Shopping list items that still reference the duplicate food
    items = session.query(ShoppingListItem).filter_by(food_id=dupe_food.id).all()
    for item in items:
        item.food_id = keep_food.id
        item.food = keep_food

    session.commit()

    # Recipe ingredients that still reference the duplicate food
    ingredients = session.query(RecipeIngredientModel).filter_by(food_id=dupe_food.id).all()
    for ingredient in ingredients:
        ingredient.food_id = keep_food.id
        ingredient.food = keep_food

    session.commit()

    # All references now point at keep_food, so the duplicate row can be removed
    session.delete(dupe_food)
    session.commit()
def _resolve_duplicate_unit(session: Session, keep_unit: IngredientUnitModel, dupe_unit: IngredientUnitModel):
    """Re-point every reference to *dupe_unit* at *keep_unit*, then delete the duplicate."""

    # Shopping list items that still reference the duplicate unit
    items = session.query(ShoppingListItem).filter_by(unit_id=dupe_unit.id).all()
    for item in items:
        item.unit_id = keep_unit.id
        item.unit = keep_unit

    session.commit()

    # Recipe ingredients that still reference the duplicate unit
    ingredients = session.query(RecipeIngredientModel).filter_by(unit_id=dupe_unit.id).all()
    for ingredient in ingredients:
        ingredient.unit_id = keep_unit.id
        ingredient.unit = keep_unit

    session.commit()

    # All references now point at keep_unit, so the duplicate row can be removed
    session.delete(dupe_unit)
    session.commit()
def _resolve_duplicate_label(session: Session, keep_label: MultiPurposeLabel, dupe_label: MultiPurposeLabel):
    """Re-point every reference to *dupe_label* at *keep_label*, then delete the duplicate."""

    # Shopping list items that still reference the duplicate label
    items = session.query(ShoppingListItem).filter_by(label_id=dupe_label.id).all()
    for item in items:
        item.label_id = keep_label.id
        item.label = keep_label

    session.commit()

    # Ingredient foods that still reference the duplicate label
    foods = session.query(IngredientFoodModel).filter_by(label_id=dupe_label.id).all()
    for food in foods:
        food.label_id = keep_label.id
        food.label = keep_label

    session.commit()

    # All references now point at keep_label, so the duplicate row can be removed
    session.delete(dupe_label)
    session.commit()
def _resolve_duplivate_foods_units_labels():
    """De-duplicate foods, units, and labels that share a (group_id, name) pair.

    For every duplicate set the first row is kept; each remaining row is merged
    into it (references updated) and then deleted by its resolver function.

    NOTE(review): "duplivate" is a typo, but the name is preserved so the
    existing caller in ``upgrade`` keeps working.
    """
    bind = op.get_bind()
    session = Session(bind=bind)

    resolvers = (
        (IngredientFoodModel, _resolve_duplicate_food),
        (IngredientUnitModel, _resolve_duplicate_unit),
        (MultiPurposeLabel, _resolve_duplicate_label),
    )
    for model, resolve in resolvers:
        for duplicate_ids in _get_duplicates(session, model).values():
            if len(duplicate_ids) < 2:
                continue  # unique within its group; nothing to merge

            keep_id, *dupe_ids = duplicate_ids
            keep_obj = session.query(model).filter_by(id=keep_id).first()
            for dupe_id in dupe_ids:
                dupe_obj = session.query(model).filter_by(id=dupe_id).first()
                resolve(session, keep_obj, dupe_obj)
def _remove_duplicates_from_m2m_table(session: Session, table_meta: TableMeta): def _remove_duplicates_from_m2m_table(session: Session, table_meta: TableMeta):
if _is_postgres(): if _is_postgres():
default_pk = "CTID" default_pk = "CTID"
@ -69,9 +155,9 @@ def _remove_duplicates_from_m2m_tables(table_metas: list[TableMeta]):
def upgrade(): def upgrade():
_resolve_duplivate_foods_units_labels()
_remove_duplicates_from_m2m_tables( _remove_duplicates_from_m2m_tables(
[ [
# M2M
TableMeta("cookbooks_to_categories", "cookbook_id", "category_id"), TableMeta("cookbooks_to_categories", "cookbook_id", "category_id"),
TableMeta("cookbooks_to_tags", "cookbook_id", "tag_id"), TableMeta("cookbooks_to_tags", "cookbook_id", "tag_id"),
TableMeta("cookbooks_to_tools", "cookbook_id", "tool_id"), TableMeta("cookbooks_to_tools", "cookbook_id", "tool_id"),
@ -83,10 +169,6 @@ def upgrade():
TableMeta("recipes_to_tools", "recipe_id", "tool_id"), TableMeta("recipes_to_tools", "recipe_id", "tool_id"),
TableMeta("users_to_favorites", "user_id", "recipe_id"), TableMeta("users_to_favorites", "user_id", "recipe_id"),
TableMeta("shopping_lists_multi_purpose_labels", "shopping_list_id", "label_id"), TableMeta("shopping_lists_multi_purpose_labels", "shopping_list_id", "label_id"),
# Foods/Units/Labels
TableMeta("ingredient_foods", "name", "group_id"),
TableMeta("ingredient_units", "name", "group_id"),
TableMeta("multi_purpose_labels", "name", "group_id"),
] ]
) )