diff --git a/frontend/api/class-interfaces/group-migrations.ts b/frontend/api/class-interfaces/group-migrations.ts new file mode 100644 index 000000000000..f2081ade87af --- /dev/null +++ b/frontend/api/class-interfaces/group-migrations.ts @@ -0,0 +1,25 @@ +import { BaseAPI } from "../_base"; +import { ReportSummary } from "./group-reports"; + +const prefix = "/api"; + +export type SupportedMigration = "nextcloud" | "chowdown"; + +export interface MigrationPayload { + migrationType: SupportedMigration; + archive: File; +} + +const routes = { + base: `${prefix}/groups/migrations`, }; + +export class GroupMigrationApi extends BaseAPI { + async startMigration(payload: MigrationPayload) { + const form = new FormData(); + form.append("migration_type", payload.migrationType); + form.append("archive", payload.archive); + + return await this.requests.post(routes.base, form); + } +} diff --git a/frontend/api/class-interfaces/group-reports.ts b/frontend/api/class-interfaces/group-reports.ts new file mode 100644 index 000000000000..daf8b3909401 --- /dev/null +++ b/frontend/api/class-interfaces/group-reports.ts @@ -0,0 +1,49 @@ +import { BaseAPI } from "../_base"; + +const prefix = "/api"; + +export type ReportCategory = "backup" | "restore" | "migration"; + +export type SummaryStatus = "success" | "failure" | "partial" | "in-progress"; + +export interface ReportEntry { + id: string; + reportId: string; + timestamp: Date; + success: boolean; + message: string; + exception: string; +} + +export interface ReportSummary { + id: string; + timestamp: Date; + category: ReportCategory; + groupId: number; + name: string; + status: SummaryStatus; +} + +export interface Report extends ReportSummary { + entries: ReportEntry[]; +} + +const routes = { + base: `${prefix}/groups/reports`, + getOne: (id: string) => `${prefix}/groups/reports/${id}`, +}; + +export class GroupReportsApi extends BaseAPI { + async getAll(category: ReportCategory | null) { + const query = category ? 
`?report_type=${category}` : ""; + return await this.requests.get(routes.base + query); + } + + async getOne(id: string) { + return await this.requests.get(routes.getOne(id)); + } + + async deleteOne(id: string) { + return await this.requests.delete(routes.getOne(id)); + } +} diff --git a/frontend/api/index.ts b/frontend/api/index.ts index 92a23a69ae24..84df7eeb6fa0 100644 --- a/frontend/api/index.ts +++ b/frontend/api/index.ts @@ -19,6 +19,8 @@ import { BulkActionsAPI } from "./class-interfaces/recipe-bulk-actions"; import { GroupServerTaskAPI } from "./class-interfaces/group-tasks"; import { AdminAPI } from "./admin-api"; import { ToolsApi } from "./class-interfaces/tools"; +import { GroupMigrationApi } from "./class-interfaces/group-migrations"; +import { GroupReportsApi } from "./class-interfaces/group-reports"; import { ApiRequestInstance } from "~/types/api"; class Api { @@ -40,6 +42,8 @@ class Api { public mealplans: MealPlanAPI; public email: EmailAPI; public bulk: BulkActionsAPI; + public groupMigration: GroupMigrationApi; + public groupReports: GroupReportsApi; public grouperServerTasks: GroupServerTaskAPI; public tools: ToolsApi; // Utils @@ -67,6 +71,10 @@ class Api { this.mealplans = new MealPlanAPI(requests); this.grouperServerTasks = new GroupServerTaskAPI(requests); + // Group + this.groupMigration = new GroupMigrationApi(requests); + this.groupReports = new GroupReportsApi(requests); + // Admin this.events = new EventsAPI(requests); this.backups = new BackupAPI(requests); diff --git a/frontend/components/Domain/Recipe/RecipeComments.vue b/frontend/components/Domain/Recipe/RecipeComments.vue index 97043f4391f3..2791acb3fd8e 100644 --- a/frontend/components/Domain/Recipe/RecipeComments.vue +++ b/frontend/components/Domain/Recipe/RecipeComments.vue @@ -57,8 +57,7 @@ + + \ No newline at end of file diff --git a/frontend/components/global/ToggleState.vue b/frontend/components/global/ToggleState.vue index 02feef23e920..3546fc5f920b 100644 --- a/frontend/components/global/ToggleState.vue +++ b/frontend/components/global/ToggleState.vue @@ -6,9 +6,8 @@ + + \ No newline at end of file diff --git a/frontend/pages/user/group/data/reports/_id.vue b/frontend/pages/user/group/data/reports/_id.vue new file mode 100644 index 000000000000..b8c7644301d9 --- /dev/null +++ b/frontend/pages/user/group/data/reports/_id.vue @@ -0,0 +1,76 @@ + + + + + \ No newline at end of file diff --git a/frontend/pages/user/profile/index.vue b/frontend/pages/user/profile/index.vue index a71e7bb5261b..d8f0fe7fe3fe 100644 --- a/frontend/pages/user/profile/index.vue +++ b/frontend/pages/user/profile/index.vue @@ -117,6 +117,15 @@ Manage your recipe data and make bulk changes + + + + Migrate your existing data from other applications like Nextcloud Recipes and Chowdown + + diff --git a/frontend/static/svgs/data-reports.svg b/frontend/static/svgs/data-reports.svg new file mode 100644 index 000000000000..19fc720cc15e --- /dev/null +++ b/frontend/static/svgs/data-reports.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/static/svgs/manage-data-migrations.svg b/frontend/static/svgs/manage-data-migrations.svg new file mode 100644 index 000000000000..a6fdb1371094 --- /dev/null +++ b/frontend/static/svgs/manage-data-migrations.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/mealie/app.py b/mealie/app.py index 0cf88086184f..670ceed02d26 100644 --- a/mealie/app.py +++ b/mealie/app.py @@ -5,7 +5,7 @@ from fastapi.middleware.gzip import GZipMiddleware from mealie.core.config import 
get_app_settings from mealie.core.root_logger import get_logger from mealie.core.settings.static import APP_VERSION -from mealie.routes import backup_routes, migration_routes, router, utility_routes +from mealie.routes import backup_routes, router, utility_routes from mealie.routes.about import about_router from mealie.routes.handlers import register_debug_handler from mealie.routes.media import media_router @@ -51,7 +51,6 @@ def api_routers(): app.include_router(about_router) app.include_router(settings_router) app.include_router(backup_routes.router) - app.include_router(migration_routes.router) app.include_router(utility_routes.router) diff --git a/mealie/db/data_access_layer/access_model_factory.py b/mealie/db/data_access_layer/access_model_factory.py index 96fef989a902..2b856af9575b 100644 --- a/mealie/db/data_access_layer/access_model_factory.py +++ b/mealie/db/data_access_layer/access_model_factory.py @@ -3,7 +3,7 @@ from functools import cached_property from sqlalchemy.orm import Session from mealie.db.models.event import Event, EventNotification -from mealie.db.models.group import Group, GroupMealPlan +from mealie.db.models.group import Group, GroupMealPlan, ReportEntryModel, ReportModel from mealie.db.models.group.cookbook import CookBook from mealie.db.models.group.invite_tokens import GroupInviteToken from mealie.db.models.group.preferences import GroupPreferencesModel @@ -28,6 +28,7 @@ from mealie.schema.meal_plan.new_meal import ReadPlanEntry from mealie.schema.recipe import Recipe, RecipeCategoryResponse, RecipeCommentOut, RecipeTagResponse from mealie.schema.recipe.recipe_ingredient import IngredientFood, IngredientUnit from mealie.schema.recipe.recipe_tool import RecipeTool +from mealie.schema.reports.reports import ReportEntryOut, ReportOut from mealie.schema.server import ServerTask from mealie.schema.user import GroupInDB, LongLiveTokenInDB, PrivateUser, SignUpOut from mealie.schema.user.user_passwords import PrivatePasswordResetToken @@ -155,3 +156,11 @@ class Database: @cached_property def webhooks(self) -> AccessModel[ReadWebhook, GroupWebhooksModel]: return AccessModel(self.session, pk_id, GroupWebhooksModel, ReadWebhook) + + @cached_property + def group_reports(self) -> AccessModel[ReportOut, ReportModel]: + return AccessModel(self.session, pk_id, ReportModel, ReportOut) + + @cached_property + def group_report_entries(self) -> AccessModel[ReportEntryOut, ReportEntryModel]: + return AccessModel(self.session, pk_id, ReportEntryModel, ReportEntryOut) diff --git a/mealie/db/models/group/__init__.py b/mealie/db/models/group/__init__.py index 3b4d9a0ec480..1f32f64ec273 100644 --- a/mealie/db/models/group/__init__.py +++ b/mealie/db/models/group/__init__.py @@ -3,5 +3,6 @@ from .group import * from .invite_tokens import * from .mealplan import * from .preferences import * +from .report import * from .shopping_list import * from .webhooks import * diff --git a/mealie/db/models/group/group.py b/mealie/db/models/group/group.py index 21629c055f4f..2d196ba80a50 100644 --- a/mealie/db/models/group/group.py +++ b/mealie/db/models/group/group.py @@ -38,13 +38,18 @@ class Group(SqlAlchemyBase, BaseMixins): recipes = orm.relationship("RecipeModel", back_populates="group", uselist=True) # CRUD From Others - mealplans = orm.relationship( - GroupMealPlan, back_populates="group", single_parent=True, order_by="GroupMealPlan.date" - ) - webhooks = orm.relationship(GroupWebhooksModel, uselist=True, cascade="all, delete-orphan") - cookbooks = orm.relationship(CookBook, 
back_populates="group", single_parent=True) - server_tasks = orm.relationship(ServerTaskModel, back_populates="group", single_parent=True) - shopping_lists = orm.relationship("ShoppingList", back_populates="group", single_parent=True) + common_args = { + "back_populates": "group", + "cascade": "all, delete-orphan", + "single_parent": True, + } + + mealplans = orm.relationship(GroupMealPlan, order_by="GroupMealPlan.date", **common_args) + webhooks = orm.relationship(GroupWebhooksModel, **common_args) + cookbooks = orm.relationship(CookBook, **common_args) + server_tasks = orm.relationship(ServerTaskModel, **common_args) + shopping_lists = orm.relationship("ShoppingList", **common_args) + group_reports = orm.relationship("ReportModel", **common_args) class Config: exclude = {"users", "webhooks", "shopping_lists", "cookbooks", "preferences", "invite_tokens", "mealplans"} diff --git a/mealie/db/models/group/report.py b/mealie/db/models/group/report.py new file mode 100644 index 000000000000..8b2758420483 --- /dev/null +++ b/mealie/db/models/group/report.py @@ -0,0 +1,50 @@ +from datetime import datetime +from uuid import uuid4 + +from sqlalchemy import Column, ForeignKey, Integer, orm +from sqlalchemy.sql.sqltypes import Boolean, DateTime, String + +from mealie.db.models._model_base import BaseMixins, SqlAlchemyBase + +from .._model_utils import auto_init +from .._model_utils.guid import GUID + + +class ReportEntryModel(SqlAlchemyBase, BaseMixins): + __tablename__ = "report_entries" + id = Column(GUID(), primary_key=True, default=uuid4) + + success = Column(Boolean, default=False) + message = Column(String, nullable=True) + exception = Column(String, nullable=True) + timestamp = Column(DateTime, nullable=False, default=datetime.utcnow) + + report_id = Column(GUID(), ForeignKey("group_reports.id"), nullable=False) + report = orm.relationship("ReportModel", back_populates="entries") + + @auto_init() + def __init__(self, **_) -> None: + pass + + +class ReportModel(SqlAlchemyBase, BaseMixins): + __tablename__ = "group_reports" + id = Column(GUID(), primary_key=True, default=uuid4) + + name = Column(String, nullable=False) + status = Column(String, nullable=False) + category = Column(String, index=True, nullable=False) + timestamp = Column(DateTime, nullable=False, default=datetime.utcnow) + + entries = orm.relationship(ReportEntryModel, back_populates="report", cascade="all, delete-orphan") + + # Relationships + group_id = Column(Integer, ForeignKey("groups.id")) + group = orm.relationship("Group", back_populates="group_reports", single_parent=True) + + class Config: + exclude = ["entries"] + + @auto_init() + def __init__(self, **_) -> None: + pass diff --git a/mealie/db/models/group/webhooks.py b/mealie/db/models/group/webhooks.py index 33194114a054..f23be07d5015 100644 --- a/mealie/db/models/group/webhooks.py +++ b/mealie/db/models/group/webhooks.py @@ -1,4 +1,4 @@ -from sqlalchemy import Boolean, Column, ForeignKey, Integer, String +from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, orm from mealie.db.models._model_base import BaseMixins, SqlAlchemyBase @@ -8,6 +8,8 @@ from .._model_utils import auto_init class GroupWebhooksModel(SqlAlchemyBase, BaseMixins): __tablename__ = "webhook_urls" id = Column(Integer, primary_key=True) + + group = orm.relationship("Group", back_populates="webhooks", single_parent=True) group_id = Column(Integer, ForeignKey("groups.id"), index=True) enabled = Column(Boolean, default=False) diff --git a/mealie/routes/groups/__init__.py 
b/mealie/routes/groups/__init__.py index 748f93a573e1..0260428a8c25 100644 --- a/mealie/routes/groups/__init__.py +++ b/mealie/routes/groups/__init__.py @@ -2,11 +2,13 @@ from datetime import date, timedelta from fastapi import APIRouter, Depends +from mealie.schema.reports.reports import ReportCategory from mealie.services._base_http_service import RouterFactory from mealie.services.group_services import CookbookService, WebhookService from mealie.services.group_services.meal_service import MealService +from mealie.services.group_services.reports_service import GroupReportService -from . import categories, invitations, preferences, self_service +from . import categories, invitations, migrations, preferences, self_service router = APIRouter() @@ -38,3 +40,16 @@ router.include_router(categories.user_router) router.include_router(webhook_router) router.include_router(invitations.router, prefix="/groups/invitations", tags=["Groups: Invitations"]) router.include_router(preferences.router, prefix="/groups/preferences", tags=["Group: Preferences"]) +router.include_router(migrations.router, prefix="/groups/migrations", tags=["Group: Migrations"]) + +report_router = RouterFactory(service=GroupReportService, prefix="/groups/reports", tags=["Groups: Reports"]) + + +@report_router.get("") +def get_all_reports( + report_type: ReportCategory = None, gm_service: GroupReportService = Depends(GroupReportService.private) +): + return gm_service._get_all(report_type) + + +router.include_router(report_router) diff --git a/mealie/routes/groups/migrations.py b/mealie/routes/groups/migrations.py new file mode 100644 index 000000000000..6ccdb9c0d75e --- /dev/null +++ b/mealie/routes/groups/migrations.py @@ -0,0 +1,26 @@ +import shutil + +from fastapi import Depends, File, Form +from fastapi.datastructures import UploadFile + +from mealie.core.dependencies import temporary_zip_path +from mealie.routes.routers import UserAPIRouter +from mealie.schema.group.group_migration import SupportedMigrations +from mealie.schema.reports.reports import ReportSummary +from mealie.services.group_services.migration_service import GroupMigrationService + +router = UserAPIRouter() + + +@router.post("", response_model=ReportSummary) +def start_data_migration( + migration_type: SupportedMigrations = Form(...), + archive: UploadFile = File(...), + temp_path: str = Depends(temporary_zip_path), + gm_service: GroupMigrationService = Depends(GroupMigrationService.private), +): + # Save archive to temp_path + with temp_path.open("wb") as buffer: + shutil.copyfileobj(archive.file, buffer) + + return gm_service.migrate(migration_type, temp_path) diff --git a/mealie/routes/migration_routes.py b/mealie/routes/migration_routes.py deleted file mode 100644 index 9049dd2fcd69..000000000000 --- a/mealie/routes/migration_routes.py +++ /dev/null @@ -1,79 +0,0 @@ -import operator -import shutil -from typing import List - -from fastapi import Depends, File, HTTPException, UploadFile, status -from sqlalchemy.orm.session import Session - -from mealie.core.config import get_app_dirs - -app_dirs = get_app_dirs() -from mealie.db.db_setup import generate_session -from mealie.routes.routers import AdminAPIRouter -from mealie.routes.users.crud import get_logged_in_user -from mealie.schema.admin import MigrationFile, Migrations -from mealie.schema.user.user import PrivateUser -from mealie.services.migrations import migration - -router = AdminAPIRouter(prefix="/api/migrations", tags=["Migration"]) - - -@router.get("", response_model=List[Migrations]) -def 
get_all_migration_options(): - """Returns a list of avaiable directories that can be imported into Mealie""" - response_data = [] - migration_dirs = [ - app_dirs.MIGRATION_DIR.joinpath("nextcloud"), - app_dirs.MIGRATION_DIR.joinpath("chowdown"), - ] - for directory in migration_dirs: - migration = Migrations(type=directory.stem) - for zip in directory.iterdir(): - if zip.suffix == ".zip": - migration_zip = MigrationFile(name=zip.name, date=zip.stat().st_ctime) - migration.files.append(migration_zip) - response_data.append(migration) - - migration.files.sort(key=operator.attrgetter("date"), reverse=True) - - return response_data - - -@router.post("/{import_type}/{file_name}/import") -def import_migration( - import_type: migration.Migration, - file_name: str, - session: Session = Depends(generate_session), - user: PrivateUser = Depends(get_logged_in_user), -): - """Imports all the recipes in a given directory""" - file_path = app_dirs.MIGRATION_DIR.joinpath(import_type.value, file_name) - return migration.migrate(user, import_type, file_path, session) - - -@router.delete("/{import_type}/{file_name}/delete", status_code=status.HTTP_200_OK) -def delete_migration_data(import_type: migration.Migration, file_name: str): - """Removes migration data from the file system""" - - remove_path = app_dirs.MIGRATION_DIR.joinpath(import_type.value, file_name) - - if remove_path.is_file(): - remove_path.unlink() - elif remove_path.is_dir(): - shutil.rmtree(remove_path) - else: - raise HTTPException(status.HTTP_400_BAD_REQUEST) - - -@router.post("/{import_type}/upload", status_code=status.HTTP_200_OK) -def upload_nextcloud_zipfile(import_type: migration.Migration, archive: UploadFile = File(...)): - """Upload a .zip File to later be imported into Mealie""" - dir = app_dirs.MIGRATION_DIR.joinpath(import_type.value) - dir.mkdir(parents=True, exist_ok=True) - dest = dir.joinpath(archive.filename) - - with dest.open("wb") as buffer: - shutil.copyfileobj(archive.file, buffer) - - if not dest.is_file: - raise HTTPException(status.HTTP_400_BAD_REQUEST) diff --git a/mealie/schema/group/group_migration.py b/mealie/schema/group/group_migration.py new file mode 100644 index 000000000000..ab7f4c209f92 --- /dev/null +++ b/mealie/schema/group/group_migration.py @@ -0,0 +1,12 @@ +import enum + +from fastapi_camelcase import CamelModel + + +class SupportedMigrations(str, enum.Enum): + nextcloud = "nextcloud" + chowdown = "chowdown" + + +class DataMigrationCreate(CamelModel): + source_type: SupportedMigrations diff --git a/mealie/schema/reports/__init__.py b/mealie/schema/reports/__init__.py new file mode 100644 index 000000000000..e063b17c02bd --- /dev/null +++ b/mealie/schema/reports/__init__.py @@ -0,0 +1 @@ +from .reports import * diff --git a/mealie/schema/reports/reports.py b/mealie/schema/reports/reports.py new file mode 100644 index 000000000000..4d10061c8df9 --- /dev/null +++ b/mealie/schema/reports/reports.py @@ -0,0 +1,53 @@ +import datetime +import enum + +from fastapi_camelcase import CamelModel +from pydantic import Field +from pydantic.types import UUID4 + + +class ReportCategory(str, enum.Enum): + backup = "backup" + restore = "restore" + migration = "migration" + + +class ReportSummaryStatus(str, enum.Enum): + in_progress = "in-progress" + success = "success" + failure = "failure" + partial = "partial" + + +class ReportEntryCreate(CamelModel): + report_id: UUID4 + timestamp: datetime.datetime = Field(default_factory=datetime.datetime.utcnow) + success: bool = True + message: str + exception: str = "" + + 
+class ReportEntryOut(ReportEntryCreate): + id: UUID4 + + class Config: + orm_mode = True + + +class ReportCreate(CamelModel): + timestamp: datetime.datetime = Field(default_factory=datetime.datetime.utcnow) + category: ReportCategory + group_id: int + name: str + status: ReportSummaryStatus = ReportSummaryStatus.in_progress + + +class ReportSummary(ReportCreate): + id: UUID4 + + +class ReportOut(ReportSummary): + entries: list[ReportEntryOut] = [] + + class Config: + orm_mode = True diff --git a/mealie/services/group_services/migration_service.py b/mealie/services/group_services/migration_service.py new file mode 100644 index 000000000000..b184a1417945 --- /dev/null +++ b/mealie/services/group_services/migration_service.py @@ -0,0 +1,37 @@ +from __future__ import annotations + +from functools import cached_property +from pathlib import Path + +from pydantic.types import UUID4 + +from mealie.core.root_logger import get_logger +from mealie.schema.group.group_migration import SupportedMigrations +from mealie.schema.reports.reports import ReportOut, ReportSummary +from mealie.services._base_http_service.http_services import UserHttpService +from mealie.services.events import create_group_event +from mealie.services.migrations import ChowdownMigrator, NextcloudMigrator + +logger = get_logger(module=__name__) + + +class GroupMigrationService(UserHttpService[int, ReportOut]): + event_func = create_group_event + _restrict_by_group = True + _schema = ReportOut + + @cached_property + def dal(self): + raise NotImplementedError + + def populate_item(self, id: UUID4) -> ReportOut: + return None + + def migrate(self, migration: SupportedMigrations, archive: Path) -> ReportSummary: + if migration == SupportedMigrations.nextcloud: + self.migration_type = NextcloudMigrator(archive, self.db, self.session, self.user.id, self.group_id) + + if migration == SupportedMigrations.chowdown: + self.migration_type = ChowdownMigrator(archive, self.db, self.session, self.user.id, self.group_id) + + return self.migration_type.migrate(f"{migration.value.title()} Migration") diff --git a/mealie/services/group_services/reports_service.py b/mealie/services/group_services/reports_service.py new file mode 100644 index 000000000000..e4b2eb2ad843 --- /dev/null +++ b/mealie/services/group_services/reports_service.py @@ -0,0 +1,31 @@ +from __future__ import annotations + +from functools import cached_property + +from mealie.core.root_logger import get_logger +from mealie.schema.reports.reports import ReportCategory, ReportCreate, ReportOut, ReportSummary +from mealie.services._base_http_service.crud_http_mixins import CrudHttpMixins +from mealie.services._base_http_service.http_services import UserHttpService +from mealie.services.events import create_group_event + +logger = get_logger(module=__name__) + + +class GroupReportService(CrudHttpMixins[ReportOut, ReportCreate, ReportCreate], UserHttpService[int, ReportOut]): + event_func = create_group_event + _restrict_by_group = True + _schema = ReportOut + + @cached_property + def dal(self): + return self.db.group_reports + + def populate_item(self, id: int) -> ReportOut: + self.item = self.dal.get_one(id) + return self.item + + def _get_all(self, report_type: ReportCategory = None) -> list[ReportSummary]: + return self.dal.multi_query({"group_id": self.group_id, "category": report_type}, limit=9999) + + def delete_one(self, id: int = None) -> ReportOut: + return self._delete_one(id) diff --git a/mealie/services/migrations/__init__.py b/mealie/services/migrations/__init__.py index 
e69de29bb2d1..11d099ec8e86 100644 --- a/mealie/services/migrations/__init__.py +++ b/mealie/services/migrations/__init__.py @@ -0,0 +1,2 @@ +from .chowdown import * +from .nextcloud import * diff --git a/mealie/services/migrations/_migration_base.py b/mealie/services/migrations/_migration_base.py index f39ea2fd1901..a586b12e0e55 100644 --- a/mealie/services/migrations/_migration_base.py +++ b/mealie/services/migrations/_migration_base.py @@ -1,122 +1,134 @@ -import json from pathlib import Path -from tempfile import TemporaryDirectory -from typing import Any, Callable, Optional - -import yaml -from pydantic import BaseModel +from typing import Tuple from mealie.core import root_logger -from mealie.db.database import get_database -from mealie.schema.admin import MigrationImport +from mealie.db.database import Database from mealie.schema.recipe import Recipe -from mealie.schema.user.user import PrivateUser -from mealie.services.image import image +from mealie.schema.reports.reports import ( + ReportCategory, + ReportCreate, + ReportEntryCreate, + ReportOut, + ReportSummary, + ReportSummaryStatus, +) from mealie.services.scraper import cleaner -from mealie.utils.unzip import unpack_zip -logger = root_logger.get_logger() +from .._base_service import BaseService +from .utils.migration_alias import MigrationAlias -class MigrationAlias(BaseModel): - """A datatype used by MigrationBase to pre-process a recipe dictionary to rewrite - the alias key in the dictionary, if it exists, to the key. If set a `func` attribute - will be called on the value before assigning the value to the new key - """ +class BaseMigrator(BaseService): + key_aliases: list[MigrationAlias] - key: str - alias: str - func: Optional[Callable] = None + report_entries: list[ReportEntryCreate] + report_id: int + report: ReportOut + def __init__(self, archive: Path, db: Database, session, user_id: int, group_id: int): + self.archive = archive + self.db = db + self.session = session + self.user_id = user_id + self.group_id = group_id -class MigrationBase(BaseModel): - migration_report: list[MigrationImport] = [] - migration_file: Path - session: Optional[Any] - key_aliases: Optional[list[MigrationAlias]] + self.report_entries = [] - user: PrivateUser + self.logger = root_logger.get_logger() - @property - def db(self): - return get_database(self.session) + super().__init__() - @property - def temp_dir(self) -> TemporaryDirectory: - """unpacks the migration_file into a temporary directory - that can be used as a context manager. 
+ def _migrate(self) -> None: + raise NotImplementedError - Returns: - TemporaryDirectory: + def _create_report(self, report_name: str) -> None: + report_to_save = ReportCreate( + name=report_name, + category=ReportCategory.migration, + status=ReportSummaryStatus.in_progress, + group_id=self.group_id, + ) + + self.report = self.db.group_reports.create(report_to_save) + self.report_id = self.report.id + + def _save_all_entries(self) -> None: + + is_success = True + is_failure = True + + for entry in self.report_entries: + if is_failure and entry.success: + is_failure = False + + if is_success and not entry.success: + is_success = False + + self.db.group_report_entries.create(entry) + + if is_success: + self.report.status = ReportSummaryStatus.success + + if is_failure: + self.report.status = ReportSummaryStatus.failure + + if not is_success and not is_failure: + self.report.status = ReportSummaryStatus.partial + + self.db.group_reports.update(self.report.id, self.report) + + def migrate(self, report_name: str) -> ReportSummary: + self._create_report(report_name) + self._migrate() + self._save_all_entries() + return self.db.group_reports.get(self.report_id) + + def import_recipes_to_database(self, validated_recipes: list[Recipe]) -> list[Tuple[str, bool]]: """ - return unpack_zip(self.migration_file) - - @staticmethod - def json_reader(json_file: Path) -> dict: - with open(json_file, "r") as f: - return json.loads(f.read()) - - @staticmethod - def yaml_reader(yaml_file: Path) -> dict: - """A helper function to read in a yaml file from a Path. This assumes that the - first yaml document is the recipe data and the second, if exists, is the description. + Used as a single access point to process a list of Recipe objects into the + database in a predictable way. If an error occurs the session is rolled back + and the process will continue. All import information is appended to the + 'migration_report' attribute to be returned to the frontend for display. Args: - yaml_file (Path): Path to yaml file - - Returns: - dict: representing the yaml file as a dictionary + validated_recipes (list[Recipe]): """ - with open(yaml_file, "r") as f: - contents = f.read().split("---") - recipe_data = {} - for _, document in enumerate(contents): - # Check if None or Empty String - if document is None or document == "": - continue + return_vars = [] - # Check if 'title:' present - elif "title:" in document: - recipe_data.update(yaml.safe_load(document)) + for recipe in validated_recipes: - else: - recipe_data["description"] = document + recipe.user_id = self.user_id + recipe.group_id = self.group_id - return recipe_data + exception = "" + status = False + try: + self.db.recipes.create(recipe) + status = True - @staticmethod - def glob_walker(directory: Path, glob_str: str, return_parent=True) -> list[Path]: # TODO: - """A Helper function that will return the glob matches for the temporary directotry - that was unpacked and passed in as the `directory` parameter. If `return_parent` is - True the return Paths will be the parent directory for the file that was matched. If - false the file itself will be returned. + except Exception as inst: + exception = inst + self.logger.exception(inst) + self.session.rollback() - Args: - directory (Path): Path to search directory - glob_str ([type]): glob style match string - return_parent (bool, optional): To return parent directory of match. Defaults to True. 
- - Returns: - list[Path]: - """ - directory = directory if isinstance(directory, Path) else Path(directory) - matches = [] - for match in directory.glob(glob_str): - if return_parent: - matches.append(match.parent) + if status: + message = f"Imported {recipe.name} successfully" else: - matches.append(match) + message = f"Failed to import {recipe.name}" - return matches + return_vars.append((recipe.slug, status)) - @staticmethod - def import_image(src: Path, dest_slug: str): - """Read the successful migrations attribute and for each import the image - appropriately into the image directory. Minification is done in mass - after the migration occurs. - """ - image.write_image(dest_slug, src, extension=src.suffix) + self.report_entries.append( + ReportEntryCreate( + report_id=self.report_id, + success=status, + message=message, + exception=str(exception), + ) + ) + + return return_vars def rewrite_alias(self, recipe_dict: dict) -> dict: """A helper function to reassign attributes by an alias using a list @@ -137,7 +149,6 @@ class MigrationBase(BaseModel): try: prop_value = recipe_dict.pop(alias.alias) except KeyError: - logger.info(f"Key {alias.alias} Not Found. Skipping...") continue if alias.func: @@ -147,7 +158,7 @@ class MigrationBase(BaseModel): return recipe_dict - def clean_recipe_dictionary(self, recipe_dict) -> Recipe: + def clean_recipe_dictionary(self, recipe_dict: dict) -> Recipe: """ Calls the rewrite_alias function and the Cleaner.clean function on a dictionary and returns the result unpacked into a Recipe object @@ -156,33 +167,3 @@ class MigrationBase(BaseModel): recipe_dict = cleaner.clean(recipe_dict, url=recipe_dict.get("org_url", None)) return Recipe(**recipe_dict) - - def import_recipes_to_database(self, validated_recipes: list[Recipe]) -> None: - """ - Used as a single access point to process a list of Recipe objects into the - database in a predictable way. If an error occurs the session is rolled back - and the process will continue. All import information is appended to the - 'migration_report' attribute to be returned to the frontend for display. 
- - Args: - validated_recipes (list[Recipe]): - """ - - for recipe in validated_recipes: - - recipe.user_id = self.user.id - recipe.group_id = self.user.group_id - - exception = "" - status = False - try: - self.db.recipes.create(recipe.dict()) - status = True - - except Exception as inst: - exception = inst - logger.exception(inst) - self.session.rollback() - - import_status = MigrationImport(slug=recipe.slug, name=recipe.name, status=status, exception=str(exception)) - self.migration_report.append(import_status) diff --git a/mealie/services/migrations/chowdown.py b/mealie/services/migrations/chowdown.py index 001db66a5bf2..726639a8daaf 100644 --- a/mealie/services/migrations/chowdown.py +++ b/mealie/services/migrations/chowdown.py @@ -1,50 +1,50 @@ +import tempfile +import zipfile from pathlib import Path -from typing import Optional -from sqlalchemy.orm.session import Session +from mealie.db.database import Database -from mealie.core.config import get_app_dirs - -app_dirs = get_app_dirs() -from mealie.schema.admin import MigrationImport -from mealie.schema.user.user import PrivateUser -from mealie.services.migrations import helpers -from mealie.services.migrations._migration_base import MigrationAlias, MigrationBase +from ._migration_base import BaseMigrator +from .utils.migration_alias import MigrationAlias +from .utils.migration_helpers import MigrationReaders, import_image, split_by_comma -class ChowdownMigration(MigrationBase): - key_aliases: Optional[list[MigrationAlias]] = [ - MigrationAlias(key="name", alias="title", func=None), - MigrationAlias(key="recipeIngredient", alias="ingredients", func=None), - MigrationAlias(key="recipeInstructions", alias="directions", func=None), - MigrationAlias(key="tags", alias="tags", func=helpers.split_by_comma), - ] +class ChowdownMigrator(BaseMigrator): + def __init__(self, archive: Path, db: Database, session, user_id: int, group_id: int): + super().__init__(archive, db, session, user_id, group_id) + self.key_aliases = [ + MigrationAlias(key="name", alias="title", func=None), + MigrationAlias(key="recipeIngredient", alias="ingredients", func=None), + MigrationAlias(key="recipeInstructions", alias="directions", func=None), + MigrationAlias(key="tags", alias="tags", func=split_by_comma), + ] -def migrate(user: PrivateUser, session: Session, zip_path: Path) -> list[MigrationImport]: - cd_migration = ChowdownMigration(user=user, migration_file=zip_path, session=session) + def _migrate(self) -> None: + with tempfile.TemporaryDirectory() as tmpdir: + with zipfile.ZipFile(self.archive) as zip_file: + zip_file.extractall(tmpdir) - with cd_migration.temp_dir as dir: - chow_dir = next(Path(dir).iterdir()) - image_dir = app_dirs.TEMP_DIR.joinpath(chow_dir, "images") - recipe_dir = app_dirs.TEMP_DIR.joinpath(chow_dir, "_recipes") + temp_path = Path(tmpdir) - recipes_as_dicts = [y for x in recipe_dir.glob("*.md") if (y := ChowdownMigration.yaml_reader(x)) is not None] + chow_dir = next(temp_path.iterdir()) + image_dir = temp_path.joinpath(chow_dir, "images") + recipe_dir = temp_path.joinpath(chow_dir, "_recipes") - recipes = [cd_migration.clean_recipe_dictionary(x) for x in recipes_as_dicts] + recipes_as_dicts = [y for x in recipe_dir.glob("*.md") if (y := MigrationReaders.yaml(x)) is not None] - cd_migration.import_recipes_to_database(recipes) + recipes = [self.clean_recipe_dictionary(x) for x in recipes_as_dicts] - recipe_lookup = {r.slug: r for r in recipes} + results = self.import_recipes_to_database(recipes) - for report in 
cd_migration.migration_report: - if report.status: - try: - original_image = recipe_lookup.get(report.slug).image - cd_image = image_dir.joinpath(original_image) - except StopIteration: - continue - if cd_image: - ChowdownMigration.import_image(cd_image, report.slug) + recipe_lookup = {r.slug: r for r in recipes} - return cd_migration.migration_report + for slug, status in results: + if status: + try: + original_image = recipe_lookup.get(slug).image + cd_image = image_dir.joinpath(original_image) + except StopIteration: + continue + if cd_image: + import_image(cd_image, slug) diff --git a/mealie/services/migrations/helpers.py b/mealie/services/migrations/helpers.py deleted file mode 100644 index b7bf4f5b242f..000000000000 --- a/mealie/services/migrations/helpers.py +++ /dev/null @@ -1,12 +0,0 @@ -def split_by_comma(tag_string: str): - """Splits a single string by ',' performs a line strip and then title cases the resulting string - - Args: - tag_string (str): [description] - - Returns: - [type]: [description] - """ - if not isinstance(tag_string, str): - return None - return [x.title().lstrip() for x in tag_string.split(",") if x != ""] diff --git a/mealie/services/migrations/migration.py b/mealie/services/migrations/migration.py deleted file mode 100644 index fcead17c5d70..000000000000 --- a/mealie/services/migrations/migration.py +++ /dev/null @@ -1,50 +0,0 @@ -from enum import Enum -from pathlib import Path - -from sqlalchemy.orm.session import Session - -from mealie.core import root_logger -from mealie.schema.admin import MigrationImport -from mealie.services.migrations import chowdown, nextcloud - -logger = root_logger.get_logger() - - -class Migration(str, Enum): - """The class defining the supported types of migrations for Mealie. Pass the - class attribute of the class instead of the string when using. - """ - - nextcloud = "nextcloud" - chowdown = "chowdown" - - -def migrate(user, migration_type: str, file_path: Path, session: Session) -> list[MigrationImport]: - """The new entry point for accessing migrations within the 'migrations' service. - Using the 'Migrations' enum class as a selector for migration_type to direct which function - to call. All migrations will return a MigrationImport object that is built for displaying - detailed information on the frontend. This will provide a single point of access - - Args: - migration_type (str): a string option representing the migration type. 
See Migration attributes for options - file_path (Path): Path to the zip file containing the data - session (Session): a SqlAlchemy Session - - Returns: - list[MigrationImport]: [description] - """ - - logger.info(f"Starting Migration from {migration_type}") - - if migration_type == Migration.nextcloud.value: - migration_imports = nextcloud.migrate(user, session, file_path) - - elif migration_type == Migration.chowdown.value: - migration_imports = chowdown.migrate(user, session, file_path) - - else: - return [] - - logger.info(f"Finishing Migration from {migration_type}") - - return migration_imports diff --git a/mealie/services/migrations/nextcloud.py b/mealie/services/migrations/nextcloud.py index dd74b28361ac..589e3f33f746 100644 --- a/mealie/services/migrations/nextcloud.py +++ b/mealie/services/migrations/nextcloud.py @@ -1,14 +1,16 @@ +import tempfile +import zipfile from dataclasses import dataclass from pathlib import Path from typing import Optional from slugify import slugify -from sqlalchemy.orm.session import Session -from mealie.schema.admin import MigrationImport -from mealie.schema.user.user import PrivateUser -from mealie.services.migrations import helpers -from mealie.services.migrations._migration_base import MigrationAlias, MigrationBase +from mealie.db.database import Database + +from ._migration_base import BaseMigrator +from .utils.migration_alias import MigrationAlias +from .utils.migration_helpers import MigrationReaders, glob_walker, import_image, split_by_comma @dataclass @@ -33,39 +35,38 @@ class NextcloudDir: except StopIteration: image_file = None - return cls(name=dir.name, recipe=NextcloudMigration.json_reader(json_file), image=image_file) + return cls(name=dir.name, recipe=MigrationReaders.json(json_file), image=image_file) -class NextcloudMigration(MigrationBase): - key_aliases: Optional[list[MigrationAlias]] = [ - MigrationAlias(key="tags", alias="keywords", func=helpers.split_by_comma), - MigrationAlias(key="org_url", alias="url", func=None), - ] +class NextcloudMigrator(BaseMigrator): + def __init__(self, archive: Path, db: Database, session, user_id: int, group_id: int): + super().__init__(archive, db, session, user_id, group_id) + self.key_aliases = [ + MigrationAlias(key="tags", alias="keywords", func=split_by_comma), + MigrationAlias(key="org_url", alias="url", func=None), + ] -def migrate(user: PrivateUser, session: Session, zip_path: Path) -> list[MigrationImport]: + def _migrate(self) -> None: + # Unzip File into temp directory - nc_migration = NextcloudMigration(user=user, migration_file=zip_path, session=session) + # get potential recipe dirs + with tempfile.TemporaryDirectory() as tmpdir: + with zipfile.ZipFile(self.archive) as zip_file: + zip_file.extractall(tmpdir) - with nc_migration.temp_dir as dir: - potential_recipe_dirs = NextcloudMigration.glob_walker(dir, glob_str="**/[!.]*.json", return_parent=True) + potential_recipe_dirs = glob_walker(Path(tmpdir), glob_str="**/[!.]*.json", return_parent=True) + nextcloud_dirs = {y.slug: y for x in potential_recipe_dirs if (y := NextcloudDir.from_dir(x))} - # nextcloud_dirs = [NextcloudDir.from_dir(x) for x in potential_recipe_dirs] - nextcloud_dirs = {y.slug: y for x in potential_recipe_dirs if (y := NextcloudDir.from_dir(x))} - # nextcloud_dirs = {x.slug: x for x in nextcloud_dirs} + all_recipes = [] + for _, nc_dir in nextcloud_dirs.items(): + recipe = self.clean_recipe_dictionary(nc_dir.recipe) + all_recipes.append(recipe) - all_recipes = [] - for _, nc_dir in nextcloud_dirs.items(): - recipe = 
nc_migration.clean_recipe_dictionary(nc_dir.recipe) - all_recipes.append(recipe) + all_statuses = self.import_recipes_to_database(all_recipes) - nc_migration.import_recipes_to_database(all_recipes) - - for report in nc_migration.migration_report: - - if report.status: - nc_dir: NextcloudDir = nextcloud_dirs[report.slug] - if nc_dir.image: - NextcloudMigration.import_image(nc_dir.image, nc_dir.slug) - - return nc_migration.migration_report + for slug, status in all_statuses: + if status: + nc_dir: NextcloudDir = nextcloud_dirs[slug] + if nc_dir.image: + import_image(nc_dir.image, nc_dir.slug) diff --git a/mealie/services/migrations/utils/__init__.py b/mealie/services/migrations/utils/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/mealie/services/migrations/utils/migration_alias.py b/mealie/services/migrations/utils/migration_alias.py new file mode 100644 index 000000000000..7103a129f73e --- /dev/null +++ b/mealie/services/migrations/utils/migration_alias.py @@ -0,0 +1,14 @@ +from typing import Callable, Optional + +from pydantic import BaseModel + + +class MigrationAlias(BaseModel): + """A datatype used by MigrationBase to pre-process a recipe dictionary to rewrite + the alias key in the dictionary, if it exists, to the key. If set a `func` attribute + will be called on the value before assigning the value to the new key + """ + + key: str + alias: str + func: Optional[Callable] = None diff --git a/mealie/services/migrations/utils/migration_helpers.py b/mealie/services/migrations/utils/migration_helpers.py new file mode 100644 index 000000000000..526a796f55fa --- /dev/null +++ b/mealie/services/migrations/utils/migration_helpers.py @@ -0,0 +1,89 @@ +import json +from pathlib import Path + +import yaml + +from mealie.services.image import image + + +class MigrationReaders: + @staticmethod + def json(json_file: Path) -> dict: + with open(json_file, "r") as f: + return json.loads(f.read()) + + @staticmethod + def yaml(yaml_file: Path) -> dict: + """A helper function to read in a yaml file from a Path. This assumes that the + first yaml document is the recipe data and the second, if exists, is the description. + + Args: + yaml_file (Path): Path to yaml file + + Returns: + dict: representing the yaml file as a dictionary + """ + with open(yaml_file, "r") as f: + contents = f.read().split("---") + recipe_data = {} + for _, document in enumerate(contents): + + # Check if None or Empty String + if document is None or document == "": + continue + + # Check if 'title:' present + elif "title:" in document: + recipe_data.update(yaml.safe_load(document)) + + else: + recipe_data["description"] = document + + return recipe_data + + +def split_by_comma(tag_string: str): + """Splits a single string by ',' performs a line strip and then title cases the resulting string + + Args: + tag_string (str): [description] + + Returns: + [type]: [description] + """ + if not isinstance(tag_string, str): + return None + return [x.title().lstrip() for x in tag_string.split(",") if x != ""] + + +def glob_walker(directory: Path, glob_str: str, return_parent=True) -> list[Path]: # TODO: + """A Helper function that will return the glob matches for the temporary directotry + that was unpacked and passed in as the `directory` parameter. If `return_parent` is + True the return Paths will be the parent directory for the file that was matched. If + false the file itself will be returned. 
+ + Args: + directory (Path): Path to search directory + glob_str ([type]): glob style match string + return_parent (bool, optional): To return parent directory of match. Defaults to True. + + Returns: + list[Path]: + """ + directory = directory if isinstance(directory, Path) else Path(directory) + matches = [] + for match in directory.glob(glob_str): + if return_parent: + matches.append(match.parent) + else: + matches.append(match) + + return matches + + +def import_image(src: Path, dest_slug: str): + """Read the successful migrations attribute and for each import the image + appropriately into the image directory. Minification is done in mass + after the migration occurs. + """ + image.write_image(dest_slug, src, extension=src.suffix) diff --git a/mealie/utils/unzip.py b/mealie/utils/unzip.py deleted file mode 100644 index 1a2c9dd886b8..000000000000 --- a/mealie/utils/unzip.py +++ /dev/null @@ -1,21 +0,0 @@ -import tempfile -import zipfile -from pathlib import Path - -from mealie.core.config import get_app_dirs - -app_dirs = get_app_dirs() - - -def unpack_zip(selection: Path) -> tempfile.TemporaryDirectory: - app_dirs.TEMP_DIR.mkdir(parents=True, exist_ok=True) - temp_dir = tempfile.TemporaryDirectory(dir=app_dirs.TEMP_DIR) - temp_dir_path = Path(temp_dir.name) - if selection.suffix == ".zip": - with zipfile.ZipFile(selection, "r") as zip_ref: - zip_ref.extractall(path=temp_dir_path) - - else: - raise Exception("File is not a zip file") - - return temp_dir diff --git a/tests/integration_tests/test_migration_routes.py b/tests/integration_tests/test_migration_routes.py index f5bdc1512e6f..ba3a01fa6095 100644 --- a/tests/integration_tests/test_migration_routes.py +++ b/tests/integration_tests/test_migration_routes.py @@ -1,105 +1,49 @@ -import json -import shutil from pathlib import Path import pytest from fastapi.testclient import TestClient -from mealie.core.config import get_app_dirs - -app_dirs = get_app_dirs() from tests.test_config import TEST_CHOWDOWN_DIR, TEST_NEXTCLOUD_DIR -from tests.utils.app_routes import AppRoutes +from tests.utils.fixture_schemas import TestUser -@pytest.fixture(scope="session") -def chowdown_zip(): - zip = TEST_CHOWDOWN_DIR.joinpath("test_chowdown-gh-pages.zip") +class Routes: + base = "/api/groups/migrations" - zip_copy = TEST_CHOWDOWN_DIR.joinpath("chowdown-gh-pages.zip") - - shutil.copy(zip, zip_copy) - - yield zip_copy - - zip_copy.unlink() + @staticmethod + def report(item_id: str) -> str: + return f"/api/groups/reports/{item_id}" -def test_upload_chowdown_zip(api_client: TestClient, api_routes: AppRoutes, chowdown_zip: Path, admin_token): - upload_url = api_routes.migrations_import_type_upload("chowdown") - response = api_client.post(upload_url, files={"archive": chowdown_zip.open("rb")}, headers=admin_token) +@pytest.mark.parametrize( + "m_type, zip_path", + [ + ("nextcloud", TEST_NEXTCLOUD_DIR.joinpath("nextcloud.zip")), + ("chowdown", TEST_CHOWDOWN_DIR.joinpath("test_chowdown-gh-pages.zip")), + ], +) +def test_migration_nextcloud(api_client: TestClient, zip_path: Path, m_type: str, unique_user: TestUser): + payload = { + "archive": zip_path.read_bytes(), + } + + data = { + "migration_type": m_type, + } + + response = api_client.post(Routes.base, data=data, files=payload, headers=unique_user.token) assert response.status_code == 200 - assert app_dirs.MIGRATION_DIR.joinpath("chowdown", chowdown_zip.name).is_file() + id = response.json()["id"] - -def test_import_chowdown_directory(api_client: TestClient, api_routes: AppRoutes, chowdown_zip: Path, 
admin_token): - delete_url = api_routes.recipes_recipe_slug("roasted-okra") - api_client.delete(delete_url, headers=admin_token) # TODO: Manage Test Data better - selection = chowdown_zip.name - - import_url = api_routes.migrations_import_type_file_name_import("chowdown", selection) - response = api_client.post(import_url, headers=admin_token) + response = api_client.get(Routes.report(id), headers=unique_user.token) assert response.status_code == 200 - reports = json.loads(response.content) + report = response.json() - for report in reports: - assert report.get("status") is True + assert report.get("status") == "success" - -def test_delete_chowdown_migration_data(api_client: TestClient, api_routes: AppRoutes, chowdown_zip: Path, admin_token): - selection = chowdown_zip.name - delete_url = api_routes.migrations_import_type_file_name_delete("chowdown", selection) - response = api_client.delete(delete_url, headers=admin_token) - - assert response.status_code == 200 - assert not app_dirs.MIGRATION_DIR.joinpath(chowdown_zip.name).is_file() - - -# Nextcloud -@pytest.fixture(scope="session") -def nextcloud_zip(): - zip = TEST_NEXTCLOUD_DIR.joinpath("nextcloud.zip") - - zip_copy = TEST_NEXTCLOUD_DIR.joinpath("new_nextcloud.zip") - - shutil.copy(zip, zip_copy) - - yield zip_copy - - zip_copy.unlink() - - -def test_upload_nextcloud_zip(api_client: TestClient, api_routes: AppRoutes, nextcloud_zip, admin_token): - upload_url = api_routes.migrations_import_type_upload("nextcloud") - response = api_client.post(upload_url, files={"archive": nextcloud_zip.open("rb")}, headers=admin_token) - - assert response.status_code == 200 - - assert app_dirs.MIGRATION_DIR.joinpath("nextcloud", nextcloud_zip.name).is_file() - - -def test_import_nextcloud_directory(api_client: TestClient, api_routes: AppRoutes, nextcloud_zip, admin_token): - selection = nextcloud_zip.name - import_url = api_routes.migrations_import_type_file_name_import("nextcloud", selection) - response = api_client.post(import_url, headers=admin_token) - - assert response.status_code == 200 - - reports = json.loads(response.content) - for report in reports: - assert report.get("status") is True - - -def test_delete__nextcloud_migration_data( - api_client: TestClient, api_routes: AppRoutes, nextcloud_zip: Path, admin_token -): - selection = nextcloud_zip.name - delete_url = api_routes.migrations_import_type_file_name_delete("nextcloud", selection) - response = api_client.delete(delete_url, headers=admin_token) - - assert response.status_code == 200 - assert not app_dirs.MIGRATION_DIR.joinpath(nextcloud_zip.name).is_file() + for entry in report.get("entries"): + assert entry.get("success") is True
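
For reference, a minimal usage sketch of how the two new frontend classes introduced in this change are intended to chain together: start a migration, read the id off the returned report summary, then fetch the full report with its per-recipe entries (the same flow the new integration test exercises against the REST routes directly). The helper name, the import paths, and the assumption that the BaseAPI request methods resolve to an object exposing a `data` property are illustrative only and not part of this diff.

```ts
// Illustrative sketch only -- not part of this diff. Assumes the "~" alias used
// elsewhere in the frontend resolves to the frontend root, and that the BaseAPI
// request helpers resolve to an object exposing a `data` property.
import { GroupMigrationApi, SupportedMigration } from "~/api/class-interfaces/group-migrations";
import { GroupReportsApi, Report } from "~/api/class-interfaces/group-reports";

export async function migrateAndFetchReport(
  migrationApi: GroupMigrationApi,
  reportsApi: GroupReportsApi,
  migrationType: SupportedMigration,
  archive: File
): Promise<Report | null> {
  // POST /api/groups/migrations with the multipart form built by startMigration.
  // The backend responds with a ReportSummary for this migration run.
  const summary = await migrationApi.startMigration({ migrationType, archive });

  const reportId = summary?.data?.id;
  if (!reportId) {
    return null;
  }

  // GET /api/groups/reports/{id} to retrieve the full report, including the
  // per-recipe entries written while the recipes were imported.
  const report = await reportsApi.getOne(reportId);
  return report?.data ?? null;
}
```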