mirror of
https://github.com/mealie-recipes/mealie.git
synced 2025-12-10 20:35:14 -05:00
feat: add reports to bulk recipe import (url) (#1294)
* remove unused docker and caddy configs * add experimental nested configs * switch to nest under docker-compose * remove v-card * bulk parser backend re-implementation * refactor UI for bulk importer * remove migration specific report text
This commit is contained in:
@@ -1,6 +1,6 @@
|
||||
from functools import cached_property
|
||||
|
||||
from fastapi import APIRouter
|
||||
from fastapi import APIRouter, HTTPException
|
||||
from pydantic import UUID4
|
||||
|
||||
from mealie.core.exceptions import mealie_registered_exceptions
|
||||
@@ -8,6 +8,7 @@ from mealie.routes._base.base_controllers import BaseUserController
|
||||
from mealie.routes._base.controller import controller
|
||||
from mealie.routes._base.mixins import HttpRepo
|
||||
from mealie.schema.reports.reports import ReportCategory, ReportCreate, ReportOut, ReportSummary
|
||||
from mealie.schema.response.responses import ErrorResponse, SuccessResponse
|
||||
|
||||
router = APIRouter(prefix="/groups/reports", tags=["Groups: Reports"])
|
||||
|
||||
@@ -39,6 +40,10 @@ class GroupReportsController(BaseUserController):
|
||||
def get_one(self, item_id: UUID4):
    """Return a single report (ReportOut) for the current group by its id.

    Delegates to the shared HttpRepo mixin, which raises the standard
    404 response when the id does not exist.
    """
    return self.mixins.get_one(item_id)
|
||||
|
||||
@router.delete("/{item_id}", status_code=200)
def delete_one(self, item_id: UUID4):
    """Delete a single report by id.

    Returns a SuccessResponse body (hence 200 rather than 204) so the
    client gets a confirmation message. Any repository failure is
    surfaced as a 500 with a structured ErrorResponse, chaining the
    original exception for server-side logging.
    """
    # NOTE(review): the span previously contained both the old 204/no-body
    # version and this one as unmarked diff residue; only the try/except
    # version is kept.
    try:
        self.mixins.delete_one(item_id)  # type: ignore
        return SuccessResponse.respond("Report deleted.")
    except Exception as ex:
        raise HTTPException(500, ErrorResponse.respond("Failed to delete report")) from ex
|
||||
|
||||
@@ -9,7 +9,6 @@ from fastapi.encoders import jsonable_encoder
|
||||
from fastapi.responses import JSONResponse
|
||||
from pydantic import BaseModel, Field
|
||||
from slugify import slugify
|
||||
from sqlalchemy.orm.session import Session
|
||||
from starlette.responses import FileResponse
|
||||
|
||||
from mealie.core import exceptions
|
||||
@@ -17,7 +16,6 @@ from mealie.core.dependencies import temporary_zip_path
|
||||
from mealie.core.dependencies.dependencies import temporary_dir, validate_recipe_token
|
||||
from mealie.core.security import create_recipe_slug_token
|
||||
from mealie.pkgs import cache
|
||||
from mealie.repos.all_repositories import get_repositories
|
||||
from mealie.repos.repository_recipes import RepositoryRecipes
|
||||
from mealie.routes._base import BaseUserController, controller
|
||||
from mealie.routes._base.mixins import HttpRepo
|
||||
@@ -29,17 +27,16 @@ from mealie.schema.recipe.recipe_asset import RecipeAsset
|
||||
from mealie.schema.recipe.recipe_scraper import ScrapeRecipeTest
|
||||
from mealie.schema.recipe.request_helpers import RecipeZipTokenResponse, UpdateImageResponse
|
||||
from mealie.schema.response.responses import ErrorResponse
|
||||
from mealie.schema.server.tasks import ServerTaskNames
|
||||
from mealie.services import urls
|
||||
from mealie.services.event_bus_service.event_bus_service import EventBusService
|
||||
from mealie.services.event_bus_service.message_types import EventTypes
|
||||
from mealie.services.recipe.recipe_data_service import RecipeDataService
|
||||
from mealie.services.recipe.recipe_service import RecipeService
|
||||
from mealie.services.recipe.template_service import TemplateService
|
||||
from mealie.services.scraper.recipe_bulk_scraper import RecipeBulkScraperService
|
||||
from mealie.services.scraper.scraped_extras import ScraperContext
|
||||
from mealie.services.scraper.scraper import create_from_url
|
||||
from mealie.services.scraper.scraper_strategies import RecipeScraperPackage
|
||||
from mealie.services.server_tasks.background_executory import BackgroundExecutor
|
||||
|
||||
|
||||
class BaseRecipeController(BaseUserController):
|
||||
@@ -172,39 +169,11 @@ class RecipeController(BaseRecipeController):
|
||||
@router.post("/create-url/bulk", status_code=202)
def parse_recipe_url_bulk(self, bulk: CreateRecipeByUrlBulk, bg_tasks: BackgroundTasks):
    """Takes in a URL and attempts to scrape data and load it into the database"""
    # Create the bulk scraper service and eagerly create its report so the
    # client can poll progress by reportId while the scrape runs in the
    # background (202 Accepted: work continues after this response).
    bulk_scraper = RecipeBulkScraperService(self.service, self.repos, self.group)
    report_id = bulk_scraper.get_report_id()

    # The actual scraping happens after the response is sent.
    bg_tasks.add_task(bulk_scraper.scrape, bulk)

    # NOTE(review): the superseded server-task implementation
    # (BackgroundExecutor + inline bulk_import_func + {"details": ...}
    # response) that appeared alongside this version as diff residue has
    # been removed; RecipeBulkScraperService now owns that logic.
    return {"reportId": report_id}
|
||||
|
||||
@router.post("/test-scrape-url")
|
||||
def test_parse_recipe_url(self, url: ScrapeRecipeTest):
|
||||
|
||||
@@ -11,6 +11,7 @@ class ReportCategory(str, enum.Enum):
|
||||
# Members of the ReportCategory str-enum: each value is the literal string
# stored/serialized for a report's category.
backup = "backup"
restore = "restore"
migration = "migration"
# New in this change: category used by the bulk recipe URL importer.
bulk_import = "bulk_import"
|
||||
|
||||
|
||||
class ReportSummaryStatus(str, enum.Enum):
|
||||
|
||||
New file: mealie/services/scraper/recipe_bulk_scraper.py (104 lines)
@@ -0,0 +1,104 @@
|
||||
from pydantic import UUID4
|
||||
|
||||
from mealie.repos.repository_factory import AllRepositories
|
||||
from mealie.schema.recipe.recipe import CreateRecipeByUrlBulk
|
||||
from mealie.schema.reports.reports import ReportCategory, ReportCreate, ReportEntryCreate, ReportSummaryStatus
|
||||
from mealie.schema.user.user import GroupInDB
|
||||
from mealie.services._base_service import BaseService
|
||||
from mealie.services.recipe.recipe_service import RecipeService
|
||||
from mealie.services.scraper.scraper import create_from_url
|
||||
|
||||
|
||||
class RecipeBulkScraperService(BaseService):
    """Scrape a batch of recipe URLs and record per-URL outcomes in a group report.

    Usage: construct, call ``get_report_id()`` to create the "in progress"
    report (typically returned to the client for polling), then run
    ``scrape()`` — usually as a background task. Entries are accumulated in
    memory and flushed to the database once at the end by
    ``_save_all_entries()``, which also sets the report's final status.
    """

    # Accumulated per-URL result entries, written to the DB in one pass.
    report_entries: list[ReportEntryCreate]

    def __init__(self, service: RecipeService, repos: AllRepositories, group: GroupInDB) -> None:
        self.service = service
        self.repos = repos
        self.group = group
        # Fix: self.report was never initialized, so scrape()'s
        # `if self.report is None` check raised AttributeError when
        # get_report_id() had not been called first.
        self.report = None
        self.report_entries = []

        super().__init__()

    def get_report_id(self) -> UUID4:
        """Create the bulk-import report in 'in progress' state and return its id."""
        import_report = ReportCreate(
            name="Bulk Import",
            category=ReportCategory.bulk_import,
            status=ReportSummaryStatus.in_progress,
            group_id=self.group.id,
        )

        self.report = self.repos.group_reports.create(import_report)
        return self.report.id

    def _add_error_entry(self, message: str, exception: str = "") -> None:
        """Queue a failed entry for the current report (not yet persisted)."""
        self.report_entries.append(
            ReportEntryCreate(
                report_id=self.report.id,
                success=False,
                message=message,
                exception=exception,
            )
        )

    def _save_all_entries(self) -> None:
        """Persist all queued entries and finalize the report status.

        Status is success if every entry succeeded, failure if every entry
        failed, partial otherwise.
        """
        is_success = True
        is_failure = True

        for entry in self.report_entries:
            if is_failure and entry.success:
                is_failure = False

            if is_success and not entry.success:
                is_success = False

            self.repos.group_report_entries.create(entry)

        # NOTE(review): with zero entries both flags stay True and the
        # failure branch wins, so an empty import is reported as a failure —
        # confirm this is the intended semantics.
        if is_success:
            self.report.status = ReportSummaryStatus.success

        if is_failure:
            self.report.status = ReportSummaryStatus.failure

        if not is_success and not is_failure:
            self.report.status = ReportSummaryStatus.partial

        self.repos.group_reports.update(self.report.id, self.report)

    def scrape(self, urls: CreateRecipeByUrlBulk) -> None:
        """Scrape every URL in *urls*, saving recipes and recording outcomes."""
        if self.report is None:
            self.get_report_id()

        for b in urls.imports:

            try:
                recipe, _ = create_from_url(b.url)
            except Exception as e:
                self.service.logger.error(f"failed to scrape url during bulk url import {b.url}")
                self.service.logger.exception(e)
                self._add_error_entry(f"failed to scrape url {b.url}", str(e))
                # Fix: was `break`, which aborted the entire bulk import on
                # the first bad URL; remaining URLs must still be processed.
                continue

            if b.tags:
                recipe.tags = b.tags

            if b.categories:
                recipe.recipe_category = b.categories

            try:
                self.service.create_one(recipe)
            except Exception as e:
                self.service.logger.error(f"Failed to save recipe to database during bulk url import {b.url}")
                self.service.logger.exception(e)
                self._add_error_entry("Failed to save recipe to database during bulk url import", str(e))
                # Fix: without this `continue` the loop fell through and
                # recorded a SUCCESS entry for a recipe that failed to save.
                continue

            self.report_entries.append(
                ReportEntryCreate(
                    report_id=self.report.id,
                    success=True,
                    message=f"Successfully imported recipe {recipe.name}",
                    exception="",
                )
            )

        self._save_all_entries()
|
||||
Reference in New Issue
Block a user