mirror of https://github.com/mealie-recipes/mealie.git (synced 2026-01-02 23:21:21 -05:00)
feature: proper multi-tenant-support (#969)(WIP)
* update naming
* refactor tests to use shared structure
* shorten names
* add tools test case
* refactor to support multi-tenant
* set group_id on creation
* initial refactor for multitenant tags/cats
* spelling
* additional test case for same valued resources
* fix recipe update tests
* apply indexes to foreign keys
* fix performance regressions
* handle unknown exception
* utility decorator for function debugging
* migrate recipe_id to UUID
* GUID for recipes
* remove unused import
* move image functions into package
* move utilities to packages dir
* update import
* linter
* image and asset routes
* update assets and images to use UUIDs
* fix migration base
* image asset test coverage
* use ids for categories and tag crud functions
* refactor recipe organizer test suite to reduce duplication
* add uuid serialization utility
* organizer base router
* slug routes testing and fixes
* fix postgres error
* adopt UUIDs
* move tags, categories, and tools under "organizers" umbrella
* update composite label
* generate ts types
* fix import error
* update frontend types
* fix type errors
* fix postgres errors
* fix #978
* add null check for title validation
* add note in docs on multi-tenancy
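The UUID bullets above ("migrate recipe_id to UUID", "GUID for recipes", "apply indexes to foreign keys", "set group_id on creation") are the backbone of the multi-tenant work. A minimal sketch of the resulting model shape, hedged: Mealie defines its own GUID column type, while this sketch uses SQLAlchemy's PostgreSQL UUID column and invented table names for brevity.

import uuid

from sqlalchemy import Column, ForeignKey, String
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Group(Base):
    __tablename__ = "groups"
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)


class RecipeModel(Base):
    __tablename__ = "recipes"
    # GUID primary key instead of an auto-increment integer ("migrate recipe_id to UUID")
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    # every recipe is owned by a group; the FK is indexed ("apply indexes to foreign keys")
    group_id = Column(UUID(as_uuid=True), ForeignKey("groups.id"), index=True, nullable=False)
    slug = Column(String, index=True, nullable=False)

Keying rows by an immutable UUID rather than a slug or serial id is what lets two groups hold identically named resources without collisions, which several hunks below depend on.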
@@ -12,7 +12,6 @@ from mealie.repos.all_repositories import get_repositories
 from mealie.schema.admin import CommentImport, GroupImport, RecipeImport, UserImport
 from mealie.schema.recipe import Recipe, RecipeCommentOut
 from mealie.schema.user import PrivateUser, UpdateGroup
-from mealie.services.image import minify

 app_dirs = get_app_dirs()

@@ -156,8 +155,6 @@ class ImportDatabase:
         recipe_dir = self.import_dir.joinpath("recipes")
         shutil.copytree(recipe_dir, app_dirs.RECIPE_DATA_DIR, dirs_exist_ok=True)

-        minify.migrate_images()
-
     def import_settings(self):
         return []
@@ -4,10 +4,10 @@ import zipfile
 from pathlib import Path
 from uuid import UUID, uuid4

+from mealie.pkgs.stats.fs_stats import pretty_size
 from mealie.repos.all_repositories import AllRepositories
 from mealie.schema.group.group_exports import GroupDataExport
 from mealie.schema.user import GroupInDB
-from mealie.utils.fs_stats import pretty_size

 from .._base_service import BaseService
 from ._abc_exporter import ABCExporter
@@ -100,7 +100,7 @@ class ShoppingListService:
     # =======================================================================
     # Methods

-    def add_recipe_ingredients_to_list(self, list_id: UUID4, recipe_id: int) -> ShoppingListOut:
+    def add_recipe_ingredients_to_list(self, list_id: UUID4, recipe_id: UUID4) -> ShoppingListOut:
         recipe = self.repos.recipes.get_one(recipe_id, "id")
         to_create = []

@@ -161,7 +161,7 @@ class ShoppingListService:
         return updated_list

-    def remove_recipe_ingredients_from_list(self, list_id: UUID4, recipe_id: int) -> ShoppingListOut:
+    def remove_recipe_ingredients_from_list(self, list_id: UUID4, recipe_id: UUID4) -> ShoppingListOut:
         shopping_list = self.shopping_lists.get_one(list_id)

         for item in shopping_list.list_items:
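With both identifiers typed as UUID4, callers can no longer pass the old integer primary key. A hedged sketch of a route that would front this service (a hypothetical endpoint, not the actual Mealie router):

from fastapi import APIRouter
from pydantic import UUID4

router = APIRouter()


@router.post("/shopping-lists/{list_id}/recipes/{recipe_id}")
def add_recipe_to_list(list_id: UUID4, recipe_id: UUID4):
    # FastAPI now rejects a non-UUID path value with a 422 before the
    # service is ever called; before this commit recipe_id was a plain int.
    ...

Validating the UUID at the edge means the repository lookup by "id" inside the service never sees a malformed key.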
@@ -1,81 +0,0 @@
import shutil
from pathlib import Path

import requests

from mealie.core import root_logger
from mealie.schema.recipe import Recipe
from mealie.services.image import minify


def write_image(recipe_slug: str, file_data: bytes, extension: str) -> Path:
    image_dir = Recipe(slug=recipe_slug).image_dir
    extension = extension.replace(".", "")
    image_path = image_dir.joinpath(f"original.{extension}")
    image_path.unlink(missing_ok=True)

    if isinstance(file_data, Path):
        shutil.copy2(file_data, image_path)
    elif isinstance(file_data, bytes):
        with open(image_path, "ab") as f:
            f.write(file_data)
    else:
        with open(image_path, "ab") as f:
            shutil.copyfileobj(file_data, f)

    minify.minify_image(image_path, force=True)

    return image_path


def scrape_image(image_url: str, slug: str) -> Path:
    logger = root_logger.get_logger()
    logger.info(f"Image URL: {image_url}")
    _FIREFOX_UA = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:86.0) Gecko/20100101 Firefox/86.0"

    if isinstance(image_url, str):  # Handles String Types
        pass

    if isinstance(image_url, list):  # Handles List Types
        # Multiple images have been defined in the schema - usually different resolutions
        # Typically would be in smallest->biggest order, but can't be certain so test each.
        # 'Google will pick the best image to display in Search results based on the aspect ratio and resolution.'

        all_image_requests = []
        for url in image_url:
            if isinstance(url, dict):
                url = url.get("url", "")
            try:
                r = requests.get(url, stream=True, headers={"User-Agent": _FIREFOX_UA})
            except Exception:
                logger.exception(f"Image {url} could not be requested")
                continue
            if r.status_code == 200:
                all_image_requests.append((url, r))

        image_url, _ = max(all_image_requests, key=lambda url_r: len(url_r[1].content), default=("", 0))

    if isinstance(image_url, dict):  # Handles Dictionary Types
        for key in image_url:
            if key == "url":
                image_url = image_url.get("url")

    filename = slug + "." + image_url.split(".")[-1]
    filename = Recipe(slug=slug).image_dir.joinpath(filename)

    try:
        r = requests.get(image_url, stream=True, headers={"User-Agent": _FIREFOX_UA})
    except Exception:
        logger.exception("Fatal Image Request Exception")
        return None

    if r.status_code == 200:
        r.raw.decode_content = True
        logger.info(f"File Name Suffix {filename.suffix}")
        write_image(slug, r.raw, filename.suffix)

        filename.unlink(missing_ok=True)

        return Path(slug)

    return None
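For reference, callers used these now-deleted module-level helpers keyed by slug, roughly like this (slug and URL values are hypothetical):

# store raw bytes (a Path or file-like object also worked) under the slug
write_image("pasta-carbonara", b"<raw image bytes>", "jpg")

# download, store, and minify in one call, again keyed by slug
scrape_image("https://example.com/pasta.jpg", "pasta-carbonara")

Both entry points are replaced later in this diff by the UUID-keyed RecipeDataService.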
@@ -1,149 +0,0 @@
import shutil
from dataclasses import dataclass
from pathlib import Path

from PIL import Image

from mealie.core import root_logger
from mealie.core.config import get_app_dirs
from mealie.schema.recipe import Recipe

logger = root_logger.get_logger()
app_dirs = get_app_dirs()


@dataclass
class ImageSizes:
    org: str
    min: str
    tiny: str


def get_image_sizes(org_img: Path, min_img: Path, tiny_img: Path) -> ImageSizes:
    return ImageSizes(org=sizeof_fmt(org_img), min=sizeof_fmt(min_img), tiny=sizeof_fmt(tiny_img))


def to_webp(image_file: Path, quality: int = 100) -> Path:
    """
    Converts an image to the webp format in-place. The original image is not
    removed. By default, the quality is set to 100.
    """
    if image_file.suffix == ".webp":
        return image_file

    img = Image.open(image_file)

    dest = image_file.with_suffix(".webp")
    img.save(dest, "WEBP", quality=quality)

    return dest


def minify_image(image_file: Path, force=False) -> ImageSizes:
    """Minifies an image in its original file format. Quality is lost.

    Args:
        my_path (Path): Source Files
        min_dest (Path): FULL Destination File Path
        tiny_dest (Path): FULL Destination File Path
    """

    def cleanup(dir: Path) -> None:
        for file in dir.glob("*.*"):
            if file.suffix != ".webp":
                file.unlink()

    org_dest = image_file.parent.joinpath("original.webp")
    min_dest = image_file.parent.joinpath("min-original.webp")
    tiny_dest = image_file.parent.joinpath("tiny-original.webp")

    cleanup_images = False

    if min_dest.exists() and tiny_dest.exists() and org_dest.exists() and not force:
        return
    try:
        img = Image.open(image_file)

        img.save(org_dest, "WEBP")
        basewidth = 720
        wpercent = basewidth / float(img.size[0])
        hsize = int((float(img.size[1]) * float(wpercent)))
        img = img.resize((basewidth, hsize), Image.ANTIALIAS)
        img.save(min_dest, "WEBP", quality=70)

        tiny_image = crop_center(img)
        tiny_image.save(tiny_dest, "WEBP", quality=70)

        cleanup_images = True

    except Exception as e:
        logger.error(e)
        shutil.copy(image_file, min_dest)
        shutil.copy(image_file, tiny_dest)

    image_sizes = get_image_sizes(image_file, min_dest, tiny_dest)

    logger.info(f"{image_file.name} Minified: {image_sizes.org} -> {image_sizes.min} -> {image_sizes.tiny}")

    if cleanup_images:
        cleanup(image_file.parent)

    return image_sizes


def crop_center(pil_img, crop_width=300, crop_height=300):
    img_width, img_height = pil_img.size
    return pil_img.crop(
        (
            (img_width - crop_width) // 2,
            (img_height - crop_height) // 2,
            (img_width + crop_width) // 2,
            (img_height + crop_height) // 2,
        )
    )


def sizeof_fmt(file_path: Path, decimal_places=2):
    if not file_path.exists():
        return "(File Not Found)"
    size = file_path.stat().st_size
    for unit in ["B", "kB", "MB", "GB", "TB", "PB"]:
        if size < 1024.0 or unit == "PB":
            break
        size /= 1024.0
    return f"{size:.{decimal_places}f} {unit}"


def move_all_images():
    if not app_dirs.IMG_DIR.exists():
        return

    for image_file in app_dirs.IMG_DIR.iterdir():
        if image_file.is_file():
            if image_file.name == ".DS_Store":
                continue
            new_folder = app_dirs.IMG_DIR.joinpath(image_file.stem)
            new_folder.mkdir(parents=True, exist_ok=True)
            new_file = new_folder.joinpath(f"original{image_file.suffix}")
            if new_file.is_file():
                new_file.unlink()
            image_file.rename(new_file)
        if image_file.is_dir():
            slug = image_file.name
            image_file.rename(Recipe(slug=slug).image_dir)


def migrate_images():
    logger.info("Checking for Images to Minify...")

    move_all_images()

    for image in app_dirs.RECIPE_DATA_DIR.glob("**/original.*"):
        minify_image(image)

    logger.info("Finished Minification Check")


if __name__ == "__main__":
    migrate_images()
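One note for anyone reusing the three-size WEBP pattern from this deleted module: Image.ANTIALIAS was an alias for the Lanczos filter and was removed in Pillow 10. The modern spelling of the resize step looks like this (a sketch under that assumption, not part of the commit):

from PIL import Image

img = Image.open("original.jpg")
width = 720
height = int(img.size[1] * (width / img.size[0]))
# Image.ANTIALIAS is gone in Pillow >= 10; Image.Resampling.LANCZOS is the equivalent
small = img.resize((width, height), Image.Resampling.LANCZOS)
small.save("min-original.webp", "WEBP", quality=70)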
@@ -29,6 +29,8 @@ class BaseMigrator(BaseService):
     report_id: int
     report: ReportOut

+    helpers: DatabaseMigrationHelpers
+
     def __init__(
         self, archive: Path, db: AllRepositories, session, user_id: UUID4, group_id: UUID, add_migration_tag: bool
     ):

@@ -94,7 +96,7 @@ class BaseMigrator(BaseService):
         self._save_all_entries()
         return self.db.group_reports.get(self.report_id)

-    def import_recipes_to_database(self, validated_recipes: list[Recipe]) -> list[Tuple[str, bool]]:
+    def import_recipes_to_database(self, validated_recipes: list[Recipe]) -> list[Tuple[str, UUID4, bool]]:
         """
         Used as a single access point to process a list of Recipe objects into the
         database in a predictable way. If an error occurs the session is rolled back

@@ -114,13 +116,19 @@ class BaseMigrator(BaseService):
             recipe.user_id = self.user_id
             recipe.group_id = self.group_id

+            if recipe.tags:
+                recipe.tags = self.helpers.get_or_set_tags(x.name for x in recipe.tags)
+
+            if recipe.recipe_category:
+                recipe.recipe_category = self.helpers.get_or_set_category(x.name for x in recipe.recipe_category)
+
             if self.add_migration_tag:
                 recipe.tags.append(migration_tag)

             exception = ""
             status = False
             try:
-                self.db.recipes.create(recipe)
+                recipe = self.db.recipes.create(recipe)
                 status = True

             except Exception as inst:

@@ -133,7 +141,7 @@ class BaseMigrator(BaseService):
             else:
                 message = f"Failed to import {recipe.name}"

-            return_vars.append((recipe.slug, status))
+            return_vars.append((recipe.slug, recipe.id, status))

             self.report_entries.append(
                 ReportEntryCreate(

@@ -181,16 +189,11 @@ class BaseMigrator(BaseService):
         """
         recipe_dict = self.rewrite_alias(recipe_dict)

-        # Temporary hold out of recipe_dict
-        # temp_categories = recipe_dict["recipeCategory"]
-        # temp_tools = recipe_dict["tools"]
-        # temp_tasg = recipe_dict["tags"]
         try:
             del recipe_dict["id"]
         except KeyError:
             pass

         recipe_dict = cleaner.clean(recipe_dict, url=recipe_dict.get("org_url", None))

-        # Reassign after cleaning
-        # recipe_dict["recipeCategory"] = temp_categories
-        # recipe_dict["tools"] = temp_tools
-        # recipe_dict["tags"] = temp_tasg
-
         return Recipe(**recipe_dict)
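Every migrator below consumes the new (slug, recipe_id, status) triples the same way. A condensed sketch of the shared pattern, where image_lookup is a hypothetical slug-to-source-path mapping (each real migrator builds its own):

def import_images(self, validated_recipes, image_lookup):
    # image_lookup: hypothetical dict mapping slug -> Path of the source image
    results = self.import_recipes_to_database(validated_recipes)
    for slug, recipe_id, status in results:
        if not status:
            continue  # the failure is already recorded in the report entries
        # destinations are now keyed by the recipe's UUID, not its slug
        import_image(image_lookup[slug], recipe_id)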
@@ -39,7 +39,7 @@ class ChowdownMigrator(BaseMigrator):
         recipe_lookup = {r.slug: r for r in recipes}

-        for slug, status in results:
+        for slug, recipe_id, status in results:
             if status:
                 try:
                     original_image = recipe_lookup.get(slug).image

@@ -47,4 +47,4 @@ class ChowdownMigrator(BaseMigrator):
                 except StopIteration:
                     continue
                 if cd_image:
-                    import_image(cd_image, slug)
+                    import_image(cd_image, recipe_id)
@@ -32,6 +32,7 @@ class MealieAlphaMigrator(BaseMigrator):
                 del recipe["date_added"]
             except Exception:
                 pass

+            # Migration from list to Object Type Data
             try:
                 if "" in recipe["tags"]:

@@ -42,7 +43,6 @@ class MealieAlphaMigrator(BaseMigrator):
             try:
                 if "" in recipe["categories"]:
                     recipe["categories"] = [cat for cat in recipe["categories"] if cat != ""]
-
             except Exception:
                 pass

@@ -76,14 +76,11 @@ class MealieAlphaMigrator(BaseMigrator):
         results = self.import_recipes_to_database(recipes)

-        recipe_model_lookup = {x.slug: x for x in recipes}
-
-        for slug, status in results:
+        for slug, recipe_id, status in results:
             if not status:
                 continue

-            model = recipe_model_lookup.get(slug)
-            dest_dir = model.directory
+            dest_dir = Recipe.directory_from_id(recipe_id)
             source_dir = recipe_lookup.get(slug)

             if dest_dir.exists():
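The switch from model.directory to Recipe.directory_from_id(recipe_id) is what lets files land on disk before a slug is final. A hedged sketch of what such a helper looks like (the real implementation lives on the Recipe schema and is not shown in this diff; the base directory is assumed):

from pathlib import Path
from uuid import UUID

DATA_DIR = Path("/app/data/recipes")  # assumed base directory


def directory_from_id(recipe_id: UUID | str) -> Path:
    # storage is keyed by the immutable UUID, so renaming a recipe
    # (changing its slug) no longer moves its files on disk
    return DATA_DIR / str(recipe_id)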
@@ -65,8 +65,8 @@ class NextcloudMigrator(BaseMigrator):
         all_statuses = self.import_recipes_to_database(all_recipes)

-        for slug, status in all_statuses:
+        for slug, recipe_id, status in all_statuses:
             if status:
                 nc_dir: NextcloudDir = nextcloud_dirs[slug]
                 if nc_dir.image:
-                    import_image(nc_dir.image, nc_dir.slug)
+                    import_image(nc_dir.image, recipe_id)
@@ -78,7 +78,7 @@ class PaprikaMigrator(BaseMigrator):
         results = self.import_recipes_to_database(recipes)

-        for slug, status in results:
+        for slug, recipe_id, status in results:
             if not status:
                 continue

@@ -88,6 +88,6 @@ class PaprikaMigrator(BaseMigrator):
                 with tempfile.NamedTemporaryFile(suffix=".jpeg") as temp_file:
                     temp_file.write(image.read())
                     path = Path(temp_file.name)
-                    import_image(path, slug)
+                    import_image(path, recipe_id)
             except Exception as e:
                 self.logger.error(f"Failed to download image for {slug}: {e}")
@@ -8,6 +8,7 @@ from mealie.repos.all_repositories import AllRepositories
 from mealie.repos.repository_factory import RepositoryGeneric
 from mealie.schema.recipe import RecipeCategory
 from mealie.schema.recipe.recipe import RecipeTag
+from mealie.schema.recipe.recipe_category import CategoryOut, CategorySave, TagOut, TagSave

 T = TypeVar("T", bound=BaseModel)

@@ -19,7 +20,9 @@ class DatabaseMigrationHelpers:
         self.session = session
         self.db = db

-    def _get_or_set_generic(self, accessor: RepositoryGeneric, items: list[str], out_model: T) -> list[T]:
+    def _get_or_set_generic(
+        self, accessor: RepositoryGeneric, items: list[str], create_model: T, out_model: T
+    ) -> list[T]:
         """
         Utility model for getting or setting categories or tags. This will only work for those two cases.

@@ -30,22 +33,32 @@ class DatabaseMigrationHelpers:
         for item_name in items:
             slug_lookup = slugify(item_name)

-            item_model = accessor.get_one(slug_lookup, "slug", override_schema=out_model)
+            item_model = accessor.get_one(value=slug_lookup, key="slug", override_schema=out_model)

             if not item_model:
                 item_model = accessor.create(
-                    out_model(
+                    create_model(
                         group_id=self.group_id,
                         name=item_name,
                         slug=slug_lookup,
                     )
                 )

             items_out.append(item_model.dict())

         return items_out

     def get_or_set_category(self, categories: list[str]) -> list[RecipeCategory]:
-        return self._get_or_set_generic(self.db.categories, categories, RecipeCategory)
+        return self._get_or_set_generic(
+            self.db.categories.by_group(self.group_id),
+            categories,
+            CategorySave,
+            CategoryOut,
+        )

     def get_or_set_tags(self, tags: list[str]) -> list[RecipeTag]:
-        return self._get_or_set_generic(self.db.tags, tags, RecipeTag)
+        return self._get_or_set_generic(
+            self.db.tags.by_group(self.group_id),
+            tags,
+            TagSave,
+            TagOut,
+        )
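Scoping the accessor with by_group(self.group_id) is the multi-tenant guarantee here: two groups can hold a tag with the same slug without colliding. A hedged usage sketch (the constructor arguments beyond db and session are assumptions inferred from the hunks above):

helpers = DatabaseMigrationHelpers(db, session, group_id=group.id, user_id=user.id)

# looked up inside this group only; created with this group's id when missing
tags = helpers.get_or_set_tags(["dinner", "vegan"])
categories = helpers.get_or_set_category(["soups"])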
@@ -2,8 +2,9 @@ import json
 from pathlib import Path

 import yaml
+from pydantic import UUID4

-from mealie.services.image import image
+from mealie.services.recipe.recipe_data_service import RecipeDataService


 class MigrationReaders:

@@ -26,8 +27,7 @@ class MigrationReaders:
         with open(yaml_file, "r") as f:
             contents = f.read().split("---")
             recipe_data = {}
-            for _, document in enumerate(contents):
+            for document in contents:
                 # Check if None or Empty String
                 if document is None or document == "":
                     continue

@@ -81,9 +81,10 @@ def glob_walker(directory: Path, glob_str: str, return_parent=True) -> list[Path
     return matches


-def import_image(src: Path, dest_slug: str):
+def import_image(src: Path, recipe_id: UUID4):
     """Read the successful migrations attribute and for each import the image
     appropriately into the image directory. Minification is done in mass
     after the migration occurs.
     """
-    image.write_image(dest_slug, src, extension=src.suffix)
+    data_service = RecipeDataService(recipe_id=recipe_id)
+    data_service.write_image(src, src.suffix)
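After this change a migrator hands import_image the recipe's UUID and the data service derives the destination from it. For example (values are hypothetical):

from pathlib import Path
from uuid import uuid4

import_image(Path("/tmp/extracted/cover.jpg"), uuid4())
# -> writes data/recipes/<uuid>/images/original.jpg, then minifies it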
mealie/services/recipe/recipe_data_service.py (new file, 108 lines)
@@ -0,0 +1,108 @@
import shutil
from pathlib import Path

import requests
from pydantic import UUID4

from mealie.pkgs import img
from mealie.schema.recipe.recipe import Recipe
from mealie.services._base_service import BaseService

_FIREFOX_UA = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:86.0) Gecko/20100101 Firefox/86.0"


class RecipeDataService(BaseService):
    minifier: img.ABCMinifier

    def __init__(self, recipe_id: UUID4, group_id: UUID4 = None) -> None:
        """
        RecipeDataService is a service that consolidates the reading/writing actions related
        to assets, and images for a recipe.
        """
        super().__init__()

        self.recipe_id = recipe_id
        self.group_id = group_id
        self.minifier = img.PillowMinifier(purge=True, logger=self.logger)

        self.dir_data = Recipe.directory_from_id(self.recipe_id)
        self.dir_image = self.dir_data.joinpath("images")
        self.dir_assets = self.dir_data.joinpath("assets")

        self.dir_image.mkdir(parents=True, exist_ok=True)
        self.dir_assets.mkdir(parents=True, exist_ok=True)

    def delete_all_data(self) -> None:
        try:
            shutil.rmtree(self.dir_data)
        except Exception as e:
            self.logger.exception(f"Failed to delete recipe data: {e}")

    def write_image(self, file_data: bytes, extension: str) -> Path:
        extension = extension.replace(".", "")
        image_path = self.dir_image.joinpath(f"original.{extension}")
        image_path.unlink(missing_ok=True)

        if isinstance(file_data, Path):
            shutil.copy2(file_data, image_path)
        elif isinstance(file_data, bytes):
            with open(image_path, "ab") as f:
                f.write(file_data)
        else:
            with open(image_path, "ab") as f:
                shutil.copyfileobj(file_data, f)

        self.minifier.minify(image_path)

        return image_path

    def scrape_image(self, image_url) -> None:
        self.logger.info(f"Image URL: {image_url}")

        if isinstance(image_url, str):  # Handles String Types
            pass

        elif isinstance(image_url, list):  # Handles List Types
            # Multiple images have been defined in the schema - usually different resolutions
            # Typically would be in smallest->biggest order, but can't be certain so test each.
            # 'Google will pick the best image to display in Search results based on the aspect ratio and resolution.'

            all_image_requests = []
            for url in image_url:
                if isinstance(url, dict):
                    url = url.get("url", "")
                try:
                    r = requests.get(url, stream=True, headers={"User-Agent": _FIREFOX_UA})
                except Exception:
                    self.logger.exception(f"Image {url} could not be requested")
                    continue
                if r.status_code == 200:
                    all_image_requests.append((url, r))

            image_url, _ = max(all_image_requests, key=lambda url_r: len(url_r[1].content), default=("", 0))

        elif isinstance(image_url, dict):  # Handles Dictionary Types
            for key in image_url:
                if key == "url":
                    image_url = image_url.get("url")

        ext = image_url.split(".")[-1]

        if ext not in img.IMAGE_EXTENSIONS:
            ext = "jpg"  # Guess the extension

        filename = str(self.recipe_id) + "." + ext
        filename = Recipe.directory_from_id(self.recipe_id).joinpath("images", filename)

        try:
            r = requests.get(image_url, stream=True, headers={"User-Agent": _FIREFOX_UA})
        except Exception:
            self.logger.exception("Fatal Image Request Exception")
            return None

        if r.status_code == 200:
            r.raw.decode_content = True
            self.logger.info(f"File Name Suffix {filename.suffix}")
            self.write_image(r.raw, filename.suffix)

        filename.unlink(missing_ok=True)
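A hedged usage sketch of the new service (file names and the URL are illustrative):

from uuid import uuid4

service = RecipeDataService(recipe_id=uuid4())

with open("cover.jpg", "rb") as f:
    service.write_image(f.read(), extension="jpg")
# -> data/recipes/<uuid>/images/original.jpg plus the minified webp variants

service.scrape_image("https://example.com/soup.jpg")
service.delete_all_data()  # removes the recipe's entire data directory

Because the constructor resolves every path from the recipe's UUID, callers no longer need a slug (or even a persisted recipe) before writing image data.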
@@ -15,7 +15,7 @@ from mealie.schema.recipe.recipe_settings import RecipeSettings
 from mealie.schema.recipe.recipe_step import RecipeStep
 from mealie.schema.user.user import GroupInDB, PrivateUser
 from mealie.services._base_service import BaseService
-from mealie.services.image.image import write_image
+from mealie.services.recipe.recipe_data_service import RecipeDataService

 from .template_service import TemplateService

@@ -142,7 +142,8 @@ class RecipeService(BaseService):
         recipe = self.create_one(Recipe(**recipe_dict))

         if recipe:
-            write_image(recipe.slug, recipe_image, "webp")
+            data_service = RecipeDataService(recipe.id)
+            data_service.write_image(recipe_image, "webp")

         return recipe
@@ -43,6 +43,9 @@ def clean_string(text: str) -> str:
     if isinstance(text, list):
         text = text[0]

+    if isinstance(text, int):
+        text = str(text)
+
     if text == "" or text is None:
         return ""
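A quick illustration of the new branch (hedged: only the guards visible in this hunk are shown, not the rest of clean_string):

clean_string(5)       # -> "5": ints are now coerced instead of slipping through untyped
clean_string(["a"])   # -> the first element is taken, as before
clean_string("")      # -> "" via the unchanged early return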
@@ -5,8 +5,9 @@ from fastapi import HTTPException, status
 from slugify import slugify

 from mealie.core.root_logger import get_logger
+from mealie.pkgs import cache
 from mealie.schema.recipe import Recipe
-from mealie.services.image.image import scrape_image
+from mealie.services.recipe.recipe_data_service import RecipeDataService

 from .recipe_scraper import RecipeScraper

@@ -29,29 +30,26 @@ def create_from_url(url: str) -> Recipe:
     """
     scraper = RecipeScraper()
     new_recipe = scraper.scrape(url)
+    new_recipe.id = uuid4()

     if not new_recipe:
         raise HTTPException(status.HTTP_400_BAD_REQUEST, {"details": ParserErrors.BAD_RECIPE_DATA.value})

-    logger = get_logger()
-    logger.info(f"Image {new_recipe.image}")
-    new_recipe.image = download_image_for_recipe(new_recipe.slug, new_recipe.image)
+    recipe_data_service = RecipeDataService(new_recipe.id)
+
+    try:
+        recipe_data_service.scrape_image(new_recipe.image)
+        new_recipe.name = slugify(new_recipe.name)
+        new_recipe.image = cache.new_key(4)
+    except Exception as e:
+        recipe_data_service.logger.exception(f"Error Scraping Image: {e}")
+        new_recipe.image = "no image"

     if new_recipe.name is None or new_recipe.name == "":
-        new_recipe.name = "No Recipe Found - " + uuid4().hex
+        new_recipe.name = "No Recipe Name Found - " + str(uuid4())
         new_recipe.slug = slugify(new_recipe.name)

     return new_recipe
-
-
-def download_image_for_recipe(slug, image_url) -> str | None:
-    img_name = None
-    try:
-        img_path = scrape_image(image_url, slug)
-        img_name = img_path.name
-    except Exception as e:
-        logger = get_logger()
-        logger.error(f"Error Scraping Image: {e}")
-        img_name = None
-
-    return img_name or "no image"
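Net effect of this file's changes, as a hedged end-to-end sketch (the URL and field values are illustrative):

recipe = create_from_url("https://example.com/recipes/tomato-soup")

recipe.id     # UUID assigned up front, before the recipe is persisted
recipe.image  # a short random cache key from cache.new_key(4), or "no image"
              # on failure; the files live under data/recipes/<recipe.id>/images/

Assigning the UUID before scraping is what allows the image download to happen immediately: the data directory is derived from the id, so nothing waits on a database insert or a final slug.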