Mirror of https://github.com/mealie-recipes/mealie.git (synced 2026-02-06 07:53:11 -05:00)
feat: ✨ add user recipe export functionality (#845)
* feat(frontend): ✨ add user recipe export functionality
* remove deprecated folders
* change/remove deprecated folders
* add testing variable in config
* add GUID support for group_id
* improve testing feedback on 422 errors
* remove/cleanup files/folders
* initial user export support
* delete unused css
* update backup page UI
* remove deprecated settings
* feat: ✨ export download links
* fix #813
* remove top level statements
* show footer
* add export purger to scheduler
* update purge glob
* fix meal-planner lockout
* feat: ✨ add bulk delete/purge exports
* style(frontend): 💄 update UI for site settings
* feat: ✨ add version checker
* update documentation

Co-authored-by: hay-kot <hay-kot@pm.me>
@@ -3,6 +3,7 @@ from __future__ import annotations
from functools import cached_property

from fastapi import HTTPException, status
+from pydantic import UUID4

from mealie.schema.group.group import GroupAdminUpdate
from mealie.schema.mapper import mapper
@@ -16,7 +17,7 @@ from mealie.services.group_services.group_utils import create_new_group

class AdminGroupService(
    CrudHttpMixins[GroupBase, GroupInDB, GroupAdminUpdate],
-    AdminHttpService[int, GroupInDB],
+    AdminHttpService[UUID4, GroupInDB],
):
    event_func = create_group_event
    _schema = GroupInDB
@@ -25,7 +26,7 @@ class AdminGroupService(
    def dal(self):
        return self.db.groups

-    def populate_item(self, id: int) -> GroupInDB:
+    def populate_item(self, id: UUID4) -> GroupInDB:
        self.item = self.dal.get_one(id)
        return self.item

@@ -35,13 +36,13 @@ class AdminGroupService(
    def create_one(self, data: GroupBase) -> GroupInDB:
        return create_new_group(self.db, data)

-    def update_one(self, data: GroupAdminUpdate, item_id: int = None) -> GroupInDB:
+    def update_one(self, data: GroupAdminUpdate, item_id: UUID4 = None) -> GroupInDB:
        target_id = item_id or data.id

        if data.preferences:
            preferences = self.db.group_preferences.get_one(value=target_id, key="group_id")
            preferences = mapper(data.preferences, preferences)
-            self.item.preferences = self.db.group_preferences.update(preferences.id, preferences)
+            self.item.preferences = self.db.group_preferences.update(target_id, preferences)

        if data.name not in ["", self.item.name]:
            self.item.name = data.name
@@ -49,11 +50,13 @@ class AdminGroupService(

        return self.item

-    def delete_one(self, id: int = None) -> GroupInDB:
+    def delete_one(self, id: UUID4 = None) -> GroupInDB:
+        target_id = id or self.item.id
+
        if len(self.item.users) > 0:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=ErrorResponse(message="Cannot delete group with users").dict(),
            )

-        return self._delete_one(id)
+        return self._delete_one(target_id)
mealie/services/exporter/__init__.py (new file, 2 lines)
@@ -0,0 +1,2 @@
from .exporter import *
from .recipe_exporter import *
mealie/services/exporter/_abc_exporter.py (new file, 91 lines)
@@ -0,0 +1,91 @@
import zipfile
from abc import abstractmethod, abstractproperty
from dataclasses import dataclass
from pathlib import Path
from typing import Callable, Iterator, Optional
from uuid import UUID

from pydantic import BaseModel

from mealie.core.root_logger import get_logger
from mealie.db.database import Database
from mealie.schema.reports.reports import ReportEntryCreate

from .._base_service import BaseService


@dataclass
class ExportedItem:
    """
    Exported items are the items provided by the items() call in a concrete exporter class,
    where the items are used to write data to the zip file. Models should derive from the
    BaseModel class OR provide a .json method that returns a json string.
    """

    model: BaseModel
    name: str


class ABCExporter(BaseService):
    write_dir_to_zip: Callable[[Path, str, Optional[list[str]]], None]

    def __init__(self, db: Database, group_id: UUID) -> None:
        self.logger = get_logger()
        self.db = db
        self.group_id = group_id

        super().__init__()

    @abstractproperty
    def destination_dir(self) -> str:
        ...

    @abstractmethod
    def items(self) -> Iterator[ExportedItem]:
        ...

    def _post_export_hook(self, _: BaseModel) -> None:
        pass

    @abstractmethod
    def export(self, zip: zipfile.ZipFile) -> list[ReportEntryCreate]:
        """
        Export takes in a zip file and exports the recipes to it. Note that the zip
        file open/close is NOT handled by this method. You must handle it yourself.

        Args:
            zip (zipfile.ZipFile): Zip file destination

        Returns:
            list[ReportEntryCreate]: report entries generated during the export
        """
        self.write_dir_to_zip = self.write_dir_to_zip_func(zip)

        for item in self.items():
            if item is None:
                self.logger.error("Failed to export item. no item found")
                continue

            zip.writestr(f"{self.destination_dir}/{item.name}/{item.name}.json", item.model.json())

            self._post_export_hook(item.model)

        self.write_dir_to_zip = None

    def write_dir_to_zip_func(self, zip: zipfile.ZipFile):
        """Returns a recursive function that writes a directory to a zip file.

        Args:
            zip (zipfile.ZipFile):
        """

        def func(source_dir: Path, dest_dir: str, ignore_ext: set[str] = None) -> None:
            ignore_ext = ignore_ext or set()

            for source_file in source_dir.iterdir():
                if source_file.is_dir():
                    func(source_file, f"{dest_dir}/{source_file.name}")
                elif source_file.suffix not in ignore_ext:
                    zip.write(source_file, f"{dest_dir}/{source_file.name}")

        return func
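Note: a minimal sketch (not part of this commit) of a concrete exporter built on the ABCExporter interface above. The NoteExporter class and Note model are hypothetical and only illustrate the contract: implement destination_dir and items(), and yield ExportedItem objects whose model exposes a .json() method.

from typing import Iterator

from pydantic import BaseModel

from mealie.services.exporter._abc_exporter import ABCExporter, ExportedItem


class Note(BaseModel):
    # Hypothetical model; any pydantic BaseModel (or object with .json()) works here.
    title: str
    text: str


class NoteExporter(ABCExporter):
    @property
    def destination_dir(self) -> str:
        return "notes"

    def items(self) -> Iterator[ExportedItem]:
        # A real exporter would pull these from self.db; hard-coded for brevity.
        yield ExportedItem(name="example-note", model=Note(title="Example", text="Hello"))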
mealie/services/exporter/exporter.py (new file, 51 lines)
@@ -0,0 +1,51 @@
import datetime
import shutil
import zipfile
from pathlib import Path
from uuid import UUID, uuid4

from mealie.db.database import Database
from mealie.schema.group.group_exports import GroupDataExport
from mealie.schema.user import GroupInDB
from mealie.utils.fs_stats import pretty_size

from .._base_service import BaseService
from ._abc_exporter import ABCExporter


class Exporter(BaseService):
    def __init__(self, group_id: UUID, temp_zip: Path, exporters: list[ABCExporter]) -> None:
        super().__init__()

        self.group_id = group_id
        self.temp_path = temp_zip
        self.exporters = exporters

    def run(self, db: Database) -> GroupDataExport:
        # Create Zip File
        self.temp_path.touch()

        # Open Zip File
        with zipfile.ZipFile(self.temp_path, "w") as zip:
            for exporter in self.exporters:
                exporter.export(zip)

        export_id = uuid4()

        export_path = GroupInDB.get_export_directory(self.group_id) / f"{export_id}.zip"

        shutil.copy(self.temp_path, export_path)

        group_data_export = GroupDataExport(
            id=export_id,
            group_id=self.group_id,
            path=str(export_path),
            name="Data Export",
            size=pretty_size(export_path.stat().st_size),
            filename=export_path.name,
            expires=datetime.datetime.now() + datetime.timedelta(days=1),
        )

        db.group_exports.create(group_data_export)

        return group_data_export
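Note: a usage sketch showing how Exporter and a concrete exporter compose; it mirrors RecipeBulkActions.export_recipes later in this diff. The db instance, group_id, slug, and temp path are placeholders, and a configured Mealie environment (existing group export directory) is assumed.

from pathlib import Path
from uuid import UUID

from mealie.schema.group.group_exports import GroupDataExport
from mealie.services.exporter import Exporter, RecipeExporter


def export_one_recipe(db, group_id: UUID, slug: str) -> GroupDataExport:
    temp_zip = Path("/tmp/mealie-export.zip")  # placeholder temp location

    recipe_exporter = RecipeExporter(db, group_id, [slug])
    exporter = Exporter(group_id, temp_zip, [recipe_exporter])

    # run() writes the zip, copies it into the group's export directory,
    # and records a GroupDataExport row with a one-day expiry.
    return exporter.run(db)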
mealie/services/exporter/recipe_exporter.py (new file, 41 lines)
@@ -0,0 +1,41 @@
from typing import Iterator
from uuid import UUID

from mealie.db.database import Database
from mealie.schema.recipe import Recipe

from ._abc_exporter import ABCExporter, ExportedItem


class RecipeExporter(ABCExporter):
    def __init__(self, db: Database, group_id: UUID, recipes: list[str]) -> None:
        """
        RecipeExporter is used to export a list of recipes to a zip file. The zip
        file is saved to a temporary directory and made available for a one-time
        download.

        Args:
            db (Database):
            group_id (UUID):
            recipes (list[str]): Recipe Slugs
        """
        super().__init__(db, group_id)
        self.recipes = recipes

    @property
    def destination_dir(self) -> str:
        return "recipes"

    def items(self) -> Iterator[ExportedItem]:
        for slug in self.recipes:
            yield ExportedItem(
                name=slug,
                model=self.db.recipes.multi_query({"slug": slug, "group_id": self.group_id}, limit=1)[0],
            )

    def _post_export_hook(self, item: Recipe) -> None:
        """Copy recipe directory contents into the zip folder"""
        recipe_dir = item.directory

        if recipe_dir.exists():
            self.write_dir_to_zip(recipe_dir, f"{self.destination_dir}/{item.slug}", {".json"})
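Note: given export() and _post_export_hook above, each recipe should land in the archive as recipes/<slug>/<slug>.json plus any non-JSON assets copied from the recipe's directory. A stdlib-only sketch for inspecting a finished export (the path is a placeholder):

import zipfile
from pathlib import Path

export_zip = Path("/tmp/mealie-export.zip")  # placeholder path to a generated export

with zipfile.ZipFile(export_zip) as zf:
    # Expected entries: recipes/<slug>/<slug>.json plus copied image/asset files
    for name in zf.namelist():
        print(name)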
@@ -1,3 +1,5 @@
+from uuid import uuid4
+
from mealie.db.data_access_layer.access_model_factory import Database
from mealie.schema.group.group_preferences import CreateGroupPreferences
from mealie.schema.user.user import GroupBase, GroupInDB
@@ -6,7 +8,8 @@ from mealie.schema.user.user import GroupBase, GroupInDB
def create_new_group(db: Database, g_base: GroupBase, g_preferences: CreateGroupPreferences = None) -> GroupInDB:
    created_group = db.groups.create(g_base)

-    g_preferences = g_preferences or CreateGroupPreferences(group_id=0)  # Assign Temporary ID before group is created
+    # Assign Temporary ID before group is created
+    g_preferences = g_preferences or CreateGroupPreferences(group_id=uuid4())

    g_preferences.group_id = created_group.id
@@ -99,6 +99,9 @@ def sizeof_fmt(file_path: Path, decimal_places=2):


def move_all_images():
+    if not app_dirs.IMG_DIR.exists():
+        return
+
    for image_file in app_dirs.IMG_DIR.iterdir():
        if image_file.is_file():
            if image_file.name == ".DS_Store":
@@ -1,5 +1,6 @@
from pathlib import Path
from typing import Tuple
+from uuid import UUID

from mealie.core import root_logger
from mealie.db.database import Database
@@ -25,7 +26,7 @@ class BaseMigrator(BaseService):
    report_id: int
    report: ReportOut

-    def __init__(self, archive: Path, db: Database, session, user_id: int, group_id: int):
+    def __init__(self, archive: Path, db: Database, session, user_id: int, group_id: UUID):
        self.archive = archive
        self.db = db
        self.session = session
@@ -1,6 +1,7 @@
import tempfile
import zipfile
from pathlib import Path
+from uuid import UUID

from mealie.db.database import Database

@@ -10,7 +11,7 @@ from .utils.migration_helpers import MigrationReaders, import_image, split_by_co


class ChowdownMigrator(BaseMigrator):
-    def __init__(self, archive: Path, db: Database, session, user_id: int, group_id: int):
+    def __init__(self, archive: Path, db: Database, session, user_id: int, group_id: UUID):
        super().__init__(archive, db, session, user_id, group_id)

        self.key_aliases = [
@@ -3,6 +3,7 @@ import zipfile
from dataclasses import dataclass
from pathlib import Path
from typing import Optional
+from uuid import UUID

from slugify import slugify

@@ -39,7 +40,7 @@ class NextcloudDir:


class NextcloudMigrator(BaseMigrator):
-    def __init__(self, archive: Path, db: Database, session, user_id: int, group_id: int):
+    def __init__(self, archive: Path, db: Database, session, user_id: int, group_id: UUID):
        super().__init__(archive, db, session, user_id, group_id)

        self.key_aliases = [
@@ -3,10 +3,12 @@ from __future__ import annotations
from pathlib import Path

from mealie.core.root_logger import get_logger
+from mealie.schema.group.group_exports import GroupDataExport
from mealie.schema.recipe import CategoryBase, Recipe
from mealie.schema.recipe.recipe_category import TagBase
from mealie.services._base_http_service.http_services import UserHttpService
from mealie.services.events import create_recipe_event
+from mealie.services.exporter import Exporter, RecipeExporter

logger = get_logger(__name__)

@@ -18,8 +20,36 @@ class RecipeBulkActions(UserHttpService[int, Recipe]):
    def populate_item(self, _: int) -> Recipe:
        return

-    def export_recipes(self, temp_path: Path, recipes: list[str]) -> None:
-        return
+    def export_recipes(self, temp_path: Path, slugs: list[str]) -> None:
+        recipe_exporter = RecipeExporter(self.db, self.group_id, slugs)
+        exporter = Exporter(self.group_id, temp_path, [recipe_exporter])
+
+        exporter.run(self.db)
+
+    def get_exports(self) -> list[GroupDataExport]:
+        return self.db.group_exports.multi_query({"group_id": self.group_id})
+
+    def purge_exports(self) -> int:
+        all_exports = self.get_exports()
+
+        exports_deleted = 0
+        for export in all_exports:
+            try:
+                Path(export.path).unlink(missing_ok=True)
+                self.db.group_exports.delete(export.id)
+                exports_deleted += 1
+            except Exception as e:
+                logger.error(f"Failed to delete export {export.id}")
+                logger.error(e)
+
+        group = self.db.groups.get_one(self.group_id)
+
+        for match in group.directory.glob("**/export/*zip"):
+            if match.is_file():
+                match.unlink()
+                exports_deleted += 1
+
+        return exports_deleted
+
    def assign_tags(self, recipes: list[str], tags: list[TagBase]) -> None:
        for slug in recipes:
@@ -59,14 +59,18 @@ class RecipeService(CrudHttpMixins[CreateRecipe, Recipe, Recipe], UserHttpServic
        if not self.item.settings.public and not self.user:
            raise HTTPException(status.HTTP_403_FORBIDDEN)

-    def get_all(self, start=0, limit=None):
-        items = self.db.recipes.summary(self.user.group_id, start=start, limit=limit)
+    def get_all(self, start=0, limit=None, load_foods=False) -> list[RecipeSummary]:
+        items = self.db.recipes.summary(self.user.group_id, start=start, limit=limit, load_foods=load_foods)

        new_items = []

        for item in items:
            # Pydantic/FastAPI can't seem to serialize the ingredient field on their own.
            new_item = item.__dict__
-            new_item["recipe_ingredient"] = [x.__dict__ for x in item.recipe_ingredient]
+
+            if load_foods:
+                new_item["recipe_ingredient"] = [x.__dict__ for x in item.recipe_ingredient]

            new_items.append(new_item)

        return [RecipeSummary.construct(**x) for x in new_items]
@@ -15,7 +15,7 @@ CWD = Path(__file__).parent

app_dirs = get_app_dirs()
TEMP_DATA = app_dirs.DATA_DIR / ".temp"
-SCHEDULER_DB = TEMP_DATA / "scheduler.db"
+SCHEDULER_DB = CWD / ".scheduler.db"
SCHEDULER_DATABASE = f"sqlite:///{SCHEDULER_DB}"

MINUTES_DAY = 1440
@@ -1,5 +1,6 @@
from .auto_backup import *
from .purge_events import *
+from .purge_group_exports import *
from .purge_password_reset import *
from .purge_registration import *
from .webhooks import *
mealie/services/scheduler/tasks/purge_group_exports.py (new file, 46 lines)
@@ -0,0 +1,46 @@
import datetime
from pathlib import Path

from mealie.core import root_logger
from mealie.core.config import get_app_dirs
from mealie.db.db_setup import create_session
from mealie.db.models.group.exports import GroupDataExportsModel

ONE_DAY_AS_MINUTES = 1440


def purge_group_data_exports(max_minutes_old=ONE_DAY_AS_MINUTES):
    """Purges all group exports after x days"""
    logger = root_logger.get_logger()

    logger.info("purging group data exports")
    limit = datetime.datetime.now() - datetime.timedelta(minutes=max_minutes_old)
    session = create_session()

    results = session.query(GroupDataExportsModel).filter(GroupDataExportsModel.expires <= limit)

    total_removed = 0
    for result in results:
        session.delete(result)
        Path(result.path).unlink(missing_ok=True)
        total_removed += 1

    session.commit()
    session.close()

    logger.info(f"finished purging group data exports. {total_removed} exports removed from group data")


def purge_excess_files() -> None:
    """Purges all files in the uploads directory that are older than 2 days"""
    directories = get_app_dirs()
    logger = root_logger.get_logger()

    limit = datetime.datetime.now() - datetime.timedelta(minutes=ONE_DAY_AS_MINUTES * 2)

    for file in directories.GROUPS_DIR.glob("**/export/*.zip"):
        if file.stat().st_mtime < limit:
            file.unlink()
            logger.info(f"excess group file removed '{file}'")

    logger.info("finished purging excess files")
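Note: a small stdlib-only sketch (not part of this commit) of the purge window implied above. Exports are created with expires set to now plus one day in exporter.py, and purge_group_data_exports removes rows whose expires value is older than now minus max_minutes_old, so with the default 1440 minutes an export becomes eligible for deletion roughly two days after it was created.

import datetime

ONE_DAY_AS_MINUTES = 1440

created = datetime.datetime.now()
expires = created + datetime.timedelta(days=1)  # set by Exporter.run when the export is written
purge_limit = datetime.datetime.now() - datetime.timedelta(minutes=ONE_DAY_AS_MINUTES)

# The row is deleted once its expiry timestamp falls behind the purge limit,
# i.e. about one day after it expires (roughly two days after creation).
print(expires <= purge_limit)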
@@ -1,3 +1,5 @@
+from uuid import uuid4
+
from fastapi import HTTPException, status

from mealie.core.root_logger import get_logger
@@ -69,7 +71,7 @@ class RegistrationService(PublicHttpService[int, str]):
        group_data = GroupBase(name=self.registration.group)

        group_preferences = CreateGroupPreferences(
-            group_id=0,
+            group_id=uuid4(),
            private_group=self.registration.private,
            first_day_of_week=0,
            recipe_public=not self.registration.private,