Mirror of https://github.com/mealie-recipes/mealie.git, synced 2025-10-27 16:24:31 -04:00
feat: import original keywords as tags

* remove cached env
* Update frontend api types
* fix: Issues with previous tag scraping implementation
* Update category handling in backend
* Update backend tests to include group_id
* Correct type check
* Update create-url interface
* Improve tag cleaner list support
* remove builtin name shadowing
* update type annotations
* test include tags scraper
* implement scraper context for optional data
* re-add cache venv
* use continue instead of break
* remove test callback

Co-authored-by: Miroito <alban.vachette@gmail.com>
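The "scraper context for optional data" item above refers to threading a mutable extras object through the scrape, so optional fields such as a page's original keywords can be collected as tags without widening every scraper's return type. A minimal sketch of that pattern, using hypothetical names (ExtrasContext and scrape_keywords are illustrations only, not mealie's actual ScrapedExtras API):

from dataclasses import dataclass, field


@dataclass
class ExtrasContext:
    """Hypothetical stand-in for a scraped-extras context object."""

    tags: list[str] = field(default_factory=list)


def scrape_keywords(html_keywords: str, ctx: ExtrasContext) -> None:
    # Optional data goes into the context instead of the main return value.
    ctx.tags.extend(k.strip() for k in html_keywords.split(",") if k.strip())


ctx = ExtrasContext()
scrape_keywords("dinner, vegan,  quick", ctx)
assert ctx.tags == ["dinner", "vegan", "quick"]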
57 lines · 1.7 KiB · Python
from enum import Enum
from uuid import uuid4

from fastapi import HTTPException, status
from slugify import slugify

from mealie.core.root_logger import get_logger
from mealie.pkgs import cache
from mealie.schema.recipe import Recipe
from mealie.services.recipe.recipe_data_service import RecipeDataService
from mealie.services.scraper.scraped_extras import ScrapedExtras

from .recipe_scraper import RecipeScraper


class ParserErrors(str, Enum):
    """Error codes surfaced to the client when URL scraping fails."""

    BAD_RECIPE_DATA = "BAD_RECIPE_DATA"
    NO_RECIPE_DATA = "NO_RECIPE_DATA"
    CONNECTION_ERROR = "CONNECTION_ERROR"


def create_from_url(url: str) -> tuple[Recipe, ScrapedExtras]:
    """Main entry point for generating a recipe from a URL. Pass in a URL and,
    if scraping succeeds, a Recipe object is returned along with any extras
    (e.g. tags scraped from the page's original keywords).

    Args:
        url (str): a valid string representing a URL

    Raises:
        HTTPException: 400 BAD REQUEST with ParserErrors.BAD_RECIPE_DATA when
            no recipe could be parsed from the page.

    Returns:
        tuple[Recipe, ScrapedExtras]: the new recipe and its scraped extras
    """
    scraper = RecipeScraper()
    new_recipe, extras = scraper.scrape(url)

    if not new_recipe:
        raise HTTPException(status.HTTP_400_BAD_REQUEST, {"details": ParserErrors.BAD_RECIPE_DATA.value})

    new_recipe.id = uuid4()
    logger = get_logger()
    logger.info(f"Image {new_recipe.image}")

    recipe_data_service = RecipeDataService(new_recipe.id)

    try:
        # Download the remote image, then replace the remote URL with a local cache key.
        recipe_data_service.scrape_image(new_recipe.image)
        new_recipe.slug = slugify(new_recipe.name)
        new_recipe.image = cache.new_key(4)
    except Exception as e:
        recipe_data_service.logger.exception(f"Error Scraping Image: {e}")
        new_recipe.image = "no image"

    # Fall back to a unique placeholder name so the slug is never empty.
    if new_recipe.name is None or new_recipe.name == "":
        new_recipe.name = f"No Recipe Name Found - {str(uuid4())}"
        new_recipe.slug = slugify(new_recipe.name)

    return new_recipe, extras
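A minimal usage sketch for the function above, assuming this file is mealie/services/scraper/scraper.py and that it is called from a FastAPI route (the path and handler below are illustrative, not mealie's actual create-url endpoint):

from fastapi import FastAPI

from mealie.services.scraper.scraper import create_from_url  # assumed module path

app = FastAPI()


@app.post("/recipes/create-url")  # hypothetical route, not mealie's real endpoint
def create_recipe(url: str):
    # On failure, create_from_url raises HTTPException(400) itself, so the
    # error propagates to the client with ParserErrors.BAD_RECIPE_DATA.
    recipe, _extras = create_from_url(url)
    return {"slug": recipe.slug}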