Mirror of https://github.com/mealie-recipes/mealie.git (synced 2026-01-08 10:01:32 -05:00)
* add dependency injection for get_repositories
* convert events api to controller
* update generic typing
* add abstract controllers
* update test naming
* migrate admin services to controllers
* add additional admin route tests
* remove print
* add public shared dependencies
* add types
* fix typo
* add static variables for recipe json keys
* add coverage gutters config
* update controller routers
* add generic success response
* add category/tag/tool tests
* add token refresh test
* add coverage utilities
* covert comments to controller
* add todo
* add helper properties
* delete old service
* update test notes
* add unit test for pretty_stats
* remove dead code from post_webhooks
* update group routes to use controllers
* add additional group test coverage
* abstract common permission checks
* convert ingredient parser to controller
* update recipe crud to use controller
* remove dead-code
* add class lifespan tracker for debugging
* convert bulk export to controller
* migrate tools router to controller
* update recipe share to controller
* move customer router to _base
* ignore prints in flake8
* convert units and foods to new controllers
* migrate user routes to controllers
* centralize error handling
* fix invalid ref
* reorder fields
* update routers to share common handling
* update tests
* remove prints
* fix cookbooks delete
* fix cookbook get
* add controller for mealplanner
* cover report routes to controller
* remove __future__ imports
* remove dead code
* remove all base_http children and remove dead code
82 lines
2.7 KiB
Python
import shutil
from pathlib import Path

import requests

from mealie.core import root_logger
from mealie.schema.recipe import Recipe
from mealie.services.image import minify

def write_image(recipe_slug: str, file_data: bytes, extension: str) -> Path:
    # file_data may be raw bytes, a Path to an existing file, or a file-like object
    # (e.g. a streamed response body); each case is handled below.
    image_dir = Recipe(slug=recipe_slug).image_dir
    extension = extension.replace(".", "")
    image_path = image_dir.joinpath(f"original.{extension}")
    image_path.unlink(missing_ok=True)

    if isinstance(file_data, Path):
        shutil.copy2(file_data, image_path)
    elif isinstance(file_data, bytes):
        with open(image_path, "ab") as f:
            f.write(file_data)
    else:
        with open(image_path, "ab") as f:
            shutil.copyfileobj(file_data, f)

    minify.minify_image(image_path, force=True)

    return image_path

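# Illustrative usage sketch (not part of the upstream file; the slug, payload, and
# extension below are made-up examples): an upload handler would typically pass the
# raw request body straight through, e.g.
#
#     saved_path = write_image("pasta-carbonara", uploaded_bytes, ".webp")
#     # -> "original.webp" inside the recipe's image directory, minified in place
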
def scrape_image(image_url: str, slug: str) -> Path:
    logger = root_logger.get_logger()
    logger.info(f"Image URL: {image_url}")
    _FIREFOX_UA = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:86.0) Gecko/20100101 Firefox/86.0"

    if isinstance(image_url, str):  # Handles String Types
        pass

    if isinstance(image_url, list):  # Handles List Types
        # Multiple images have been defined in the schema - usually different resolutions
        # Typically would be in smallest->biggest order, but can't be certain so test each.
        # 'Google will pick the best image to display in Search results based on the aspect ratio and resolution.'

        all_image_requests = []
        for url in image_url:
            if isinstance(url, dict):
                url = url.get("url", "")
            try:
                r = requests.get(url, stream=True, headers={"User-Agent": _FIREFOX_UA})
            except Exception:
                logger.exception(f"Image {url} could not be requested")
                continue
            if r.status_code == 200:
                all_image_requests.append((url, r))

        # Keep the candidate with the largest response body; fall back to an empty
        # URL if every request failed.
        image_url, _ = max(all_image_requests, key=lambda url_r: len(url_r[1].content), default=("", 0))

    if isinstance(image_url, dict):  # Handles Dictionary Types
        for key in image_url:
            if key == "url":
                image_url = image_url.get("url")

    filename = slug + "." + image_url.split(".")[-1]
    filename = Recipe(slug=slug).image_dir.joinpath(filename)

    try:
        r = requests.get(image_url, stream=True, headers={"User-Agent": _FIREFOX_UA})
    except Exception:
        logger.exception("Fatal Image Request Exception")
        return None

    if r.status_code == 200:
        r.raw.decode_content = True
        logger.info(f"File Name Suffix {filename.suffix}")
        write_image(slug, r.raw, filename.suffix)

        filename.unlink(missing_ok=True)

        return Path(slug)

    return None

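# Illustrative sketch only (not in the upstream module): how scrape_image might be
# exercised directly. The URL and slug are hypothetical, and running this requires a
# configured mealie data directory, since Recipe(slug=...).image_dir resolves against
# the application's data path.
if __name__ == "__main__":
    result = scrape_image("https://example.com/images/pasta-carbonara.jpg", "pasta-carbonara")
    # scrape_image returns Path(slug) on success and None when the download fails.
    print(result)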