Mirror of https://github.com/mealie-recipes/mealie.git (synced 2025-12-15 06:45:23 -05:00)
improve developer tooling (backend) (#1051)
* add basic pre-commit file
* add flake8
* add isort
* add pep585-upgrade (typing upgrades)
* use namespace for import
* add mypy
* update ci for backend
* flake8 scope
* fix version format
* update makefile
* disable strict option (temporary)
* fix mypy issues
* upgrade type hints (pre-commit)
* add vscode typing check
* add types to dev deps
* remote container draft
* update setup script
* update compose version
* run setup on create
* dev containers update
* remove unused pages
* update setup tips
* expose ports
* Update pre-commit to include flake8-print (#1053)
* Add in flake8-print to pre-commit
* pin version of flake8-print
* formatting
* update getting started docs
* add mypy to pre-commit
* purge .mypy_cache on clean
* drop mypy

Co-authored-by: zackbcom <zackbcom@users.noreply.github.com>
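Most of the backend diff below is the pep585-upgrade and mypy work listed above: imports move from typing to collections.abc or to built-in generics, and a handful of lines gain # type: ignore. A minimal, illustrative sketch of that upgrade pattern (the function and names here are not taken from the Mealie codebase):

# pep585-style typing: collections.abc plus built-in generics (Python 3.9+).
# Previously this would have been:
#   from typing import Callable, Iterable, List, Tuple
from collections.abc import Callable, Iterable


def apply_all(funcs: Iterable[Callable[[str], str]], value: str) -> tuple[str, ...]:
    # was: funcs: List[Callable[[str], str]] and -> Tuple[str, ...]
    return tuple(f(value) for f in funcs)


print(apply_all([str.strip, str.upper], "  mealie  "))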
@@ -39,7 +39,7 @@ class ExportDatabase:
         try:
             self.templates = [app_dirs.TEMPLATE_DIR.joinpath(x) for x in templates]
         except Exception:
-            self.templates = False
+            self.templates = []
             logger.info("No Jinja2 Templates Registered for Export")

         required_dirs = [
@@ -1,8 +1,8 @@
 import json
 import shutil
 import zipfile
+from collections.abc import Callable
 from pathlib import Path
-from typing import Callable

 from pydantic.main import BaseModel
 from sqlalchemy.orm.session import Session
@@ -140,7 +140,7 @@ class ImportDatabase:

         if image_dir.exists():  # Migrate from before v0.5.0
             for image in image_dir.iterdir():
-                item: Recipe = successful_imports.get(image.stem)
+                item: Recipe = successful_imports.get(image.stem)  # type: ignore

                 if item:
                     dest_dir = item.image_dir
@@ -294,7 +294,7 @@ def import_database(
     settings_report = import_session.import_settings() if import_settings else []
     group_report = import_session.import_groups() if import_groups else []
     user_report = import_session.import_users() if import_users else []
-    notification_report = []
+    notification_report: list = []

     import_session.clean_up()
@@ -6,7 +6,7 @@ from fastapi.encoders import jsonable_encoder
 from pydantic import BaseModel
 from sqlalchemy import MetaData, create_engine
 from sqlalchemy.engine import base
-from sqlalchemy.orm import Session, sessionmaker
+from sqlalchemy.orm import sessionmaker

 from mealie.services._base_service import BaseService
@@ -122,8 +122,6 @@ class AlchemyExporter(BaseService):
         """Drops all data from the database"""
         self.meta.reflect(bind=self.engine)
         with self.session_maker() as session:
-            session: Session
-
             is_postgres = self.settings.DB_ENGINE == "postgres"

             try:
@@ -23,7 +23,7 @@ class DefaultEmailSender(ABCEmailSender, BaseService):
             mail_from=(self.settings.SMTP_FROM_NAME, self.settings.SMTP_FROM_EMAIL),
         )

-        smtp_options = {"host": self.settings.SMTP_HOST, "port": self.settings.SMTP_PORT}
+        smtp_options: dict[str, str | bool] = {"host": self.settings.SMTP_HOST, "port": self.settings.SMTP_PORT}
         if self.settings.SMTP_TLS:
             smtp_options["tls"] = True
         if self.settings.SMTP_USER:
@@ -1,8 +1,9 @@
 import zipfile
 from abc import abstractmethod, abstractproperty
+from collections.abc import Iterator
 from dataclasses import dataclass
 from pathlib import Path
-from typing import Callable, Iterator, Optional
+from typing import Callable, Optional
 from uuid import UUID

 from pydantic import BaseModel
@@ -27,7 +28,7 @@ class ExportedItem:


 class ABCExporter(BaseService):
-    write_dir_to_zip: Callable[[Path, str, Optional[list[str]]], None]
+    write_dir_to_zip: Callable[[Path, str, Optional[set[str]]], None] | None

     def __init__(self, db: AllRepositories, group_id: UUID) -> None:
         self.logger = get_logger()
@@ -47,8 +48,7 @@ class ABCExporter(BaseService):
     def _post_export_hook(self, _: BaseModel) -> None:
         pass

-    @abstractmethod
-    def export(self, zip: zipfile.ZipFile) -> list[ReportEntryCreate]:
+    def export(self, zip: zipfile.ZipFile) -> list[ReportEntryCreate]:  # type: ignore
         """
         Export takes in a zip file and exports the recipes to it. Note that the zip
         file open/close is NOT handled by this method. You must handle it yourself.
@@ -57,7 +57,7 @@ class ABCExporter(BaseService):
             zip (zipfile.ZipFile): Zip file destination

         Returns:
-            list[ReportEntryCreate]: [description] ???!?!
+            list[ReportEntryCreate]:
         """
         self.write_dir_to_zip = self.write_dir_to_zip_func(zip)
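The docstring above replaces the placeholder return note and spells out the exporter contract: export() writes into an already-open archive, and opening/closing the zip is the caller's responsibility. A minimal caller sketch under that contract (the exporter instance and destination path are assumed for illustration; only the open/close responsibility comes from the docstring):

import zipfile
from pathlib import Path


def run_export(exporter, dest: Path) -> list:
    # The caller owns the ZipFile lifecycle; export() only writes entries into it.
    with zipfile.ZipFile(dest, "w", compression=zipfile.ZIP_DEFLATED) as zf:
        report_entries = exporter.export(zf)
    return report_entries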
@@ -1,4 +1,4 @@
-from typing import Iterator
+from collections.abc import Iterator
 from uuid import UUID

 from mealie.repos.all_repositories import AllRepositories
@@ -37,5 +37,5 @@ class RecipeExporter(ABCExporter):
         """Copy recipe directory contents into the zip folder"""
         recipe_dir = item.directory

-        if recipe_dir.exists():
+        if recipe_dir.exists() and self.write_dir_to_zip:
             self.write_dir_to_zip(recipe_dir, f"{self.destination_dir}/{item.slug}", {".json"})
@@ -168,7 +168,7 @@ class ShoppingListService:
             found = False

             for ref in item.recipe_references:
-                remove_qty = 0
+                remove_qty = 0.0

                 if ref.recipe_id == recipe_id:
                     self.list_item_refs.delete(ref.id)
@@ -199,4 +199,4 @@ class ShoppingListService:
                 break

         # Save Changes
-        return self.shopping_lists.get(shopping_list.id)
+        return self.shopping_lists.get_one(shopping_list.id)
@@ -1,5 +1,4 @@
 from pathlib import Path
-from typing import Tuple
 from uuid import UUID

 from pydantic import UUID4
@@ -94,9 +93,10 @@ class BaseMigrator(BaseService):
         self._create_report(report_name)
         self._migrate()
         self._save_all_entries()
-        return self.db.group_reports.get(self.report_id)
-
-    def import_recipes_to_database(self, validated_recipes: list[Recipe]) -> list[Tuple[str, UUID4, bool]]:
+        return self.db.group_reports.get_one(self.report_id)
+
+    def import_recipes_to_database(self, validated_recipes: list[Recipe]) -> list[tuple[str, UUID4, bool]]:
         """
         Used as a single access point to process a list of Recipe objects into the
         database in a predictable way. If an error occurs the session is rolled back
@@ -67,6 +67,6 @@ class NextcloudMigrator(BaseMigrator):

         for slug, recipe_id, status in all_statuses:
             if status:
-                nc_dir: NextcloudDir = nextcloud_dirs[slug]
+                nc_dir = nextcloud_dirs[slug]
                 if nc_dir.image:
                     import_image(nc_dir.image, recipe_id)
@@ -1,3 +1,4 @@
+from collections.abc import Iterable
 from typing import TypeVar

 from pydantic import UUID4, BaseModel
@@ -14,14 +15,14 @@ T = TypeVar("T", bound=BaseModel)


 class DatabaseMigrationHelpers:
-    def __init__(self, db: AllRepositories, session: Session, group_id: int, user_id: UUID4) -> None:
+    def __init__(self, db: AllRepositories, session: Session, group_id: UUID4, user_id: UUID4) -> None:
         self.group_id = group_id
         self.user_id = user_id
         self.session = session
         self.db = db

     def _get_or_set_generic(
-        self, accessor: RepositoryGeneric, items: list[str], create_model: T, out_model: T
+        self, accessor: RepositoryGeneric, items: Iterable[str], create_model: type[T], out_model: type[T]
     ) -> list[T]:
         """
         Utility model for getting or setting categories or tags. This will only work for those two cases.
@@ -47,7 +48,7 @@ class DatabaseMigrationHelpers:
             items_out.append(item_model.dict())
         return items_out

-    def get_or_set_category(self, categories: list[str]) -> list[RecipeCategory]:
+    def get_or_set_category(self, categories: Iterable[str]) -> list[RecipeCategory]:
         return self._get_or_set_generic(
             self.db.categories.by_group(self.group_id),
             categories,
@@ -55,7 +56,7 @@
             CategoryOut,
         )

-    def get_or_set_tags(self, tags: list[str]) -> list[RecipeTag]:
+    def get_or_set_tags(self, tags: Iterable[str]) -> list[RecipeTag]:
         return self._get_or_set_generic(
             self.db.tags.by_group(self.group_id),
             tags,
@@ -1,4 +1,5 @@
-from typing import Callable, Optional
+from collections.abc import Callable
+from typing import Optional

 from pydantic import BaseModel

@@ -10,10 +10,10 @@ def move_parens_to_end(ing_str) -> str:
     If no parentheses are found, the string is returned unchanged.
     """
-    if re.match(compiled_match, ing_str):
-        match = re.search(compiled_search, ing_str)
-        start = match.start()
-        end = match.end()
-        ing_str = ing_str[:start] + ing_str[end:] + " " + ing_str[start:end]
+    if match := re.search(compiled_search, ing_str):
+        start = match.start()
+        end = match.end()
+        ing_str = ing_str[:start] + ing_str[end:] + " " + ing_str[start:end]

     return ing_str
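The hunk above collapses the separate re.match guard plus re.search into a single assignment expression. A standalone sketch of the same move-the-parenthesized-part-to-the-end idea; the regex here is an assumption, since compiled_search is defined elsewhere in the module:

import re

# Stand-in for the module-level compiled_search pattern (assumed).
PAREN_SEARCH = re.compile(r"\(.*?\)")


def move_parens_to_end(ing_str: str) -> str:
    # Bind and test the match in one step, mirroring the walrus refactor above.
    if match := PAREN_SEARCH.search(ing_str):
        start, end = match.start(), match.end()
        ing_str = ing_str[:start] + ing_str[end:] + " " + ing_str[start:end]
    return ing_str


print(move_parens_to_end("1 cup flour (sifted)"))  # "1 cup flour  (sifted)"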
@@ -1,6 +1,5 @@
 import string
 import unicodedata
-from typing import Tuple

 from pydantic import BaseModel
@@ -10,7 +9,7 @@ from .._helpers import check_char, move_parens_to_end
 class BruteParsedIngredient(BaseModel):
     food: str = ""
     note: str = ""
-    amount: float = ""
+    amount: float = 1.0
     unit: str = ""

     class Config:
@@ -31,7 +30,7 @@ def parse_fraction(x):
         raise ValueError


-def parse_amount(ing_str) -> Tuple[float, str, str]:
+def parse_amount(ing_str) -> tuple[float, str, str]:
     def keep_looping(ing_str, end) -> bool:
         """
         Checks if:
@@ -48,7 +47,9 @@ def parse_amount(ing_str) -> Tuple[float, str, str]:
         if check_char(ing_str[end], ".", ",", "/") and end + 1 < len(ing_str) and ing_str[end + 1] in string.digits:
             return True

-    amount = 0
+        return False
+
+    amount = 0.0
     unit = ""
     note = ""
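keep_looping above scans digits plus ".", "," and "/" so that strings like "1 1/2" can be accumulated into a float amount (now initialised as 0.0 rather than the int 0). A small, independent sketch of turning such tokens into floats with the standard library; it is not the module's own parse_fraction, just the same idea:

from fractions import Fraction


def amount_token_to_float(token: str) -> float:
    # Plain numbers ("2", "0.5") and simple fractions ("1/2") both become floats.
    if "/" in token:
        return float(Fraction(token))
    return float(token)


print(amount_token_to_float("1") + amount_token_to_float("1/2"))  # 1.5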
@@ -87,7 +88,7 @@ def parse_amount(ing_str) -> Tuple[float, str, str]:
     return amount, unit, note


-def parse_ingredient_with_comma(tokens) -> Tuple[str, str]:
+def parse_ingredient_with_comma(tokens) -> tuple[str, str]:
     ingredient = ""
     note = ""
     start = 0
@@ -105,7 +106,7 @@ def parse_ingredient_with_comma(tokens) -> Tuple[str, str]:
     return ingredient, note


-def parse_ingredient(tokens) -> Tuple[str, str]:
+def parse_ingredient(tokens) -> tuple[str, str]:
     ingredient = ""
     note = ""
     if tokens[-1].endswith(")"):
@@ -132,7 +133,7 @@ def parse_ingredient(tokens) -> Tuple[str, str]:


 def parse(ing_str) -> BruteParsedIngredient:
-    amount = 0
+    amount = 0.0
     unit = ""
     ingredient = ""
     note = ""
@@ -5,6 +5,8 @@ from pathlib import Path

 from pydantic import BaseModel, validator

+from mealie.schema._mealie.types import NoneFloat
+
 from . import utils
 from .pre_processor import pre_process_string

@@ -14,10 +16,10 @@ MODEL_PATH = CWD / "model.crfmodel"

 class CRFConfidence(BaseModel):
     average: float = 0.0
-    comment: float = None
-    name: float = None
-    unit: float = None
-    qty: float = None
+    comment: NoneFloat = None
+    name: NoneFloat = None
+    unit: NoneFloat = None
+    qty: NoneFloat = None


 class CRFIngredient(BaseModel):
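The confidence fields switch from float = None, which mypy rejects, to a NoneFloat alias imported from mealie.schema._mealie.types. The alias definition is not part of this diff; a plausible minimal equivalent would be:

from typing import Optional

# Assumed shape of the alias; the real definition lives in
# mealie.schema._mealie.types and is not shown in this commit.
NoneFloat = Optional[float]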
@@ -99,7 +99,7 @@ class NLPParser(ABCIngredientParser):
         return [self._crf_to_ingredient(crf_model) for crf_model in crf_models]

     def parse_one(self, ingredient: str) -> ParsedIngredient:
-        items = self.parse_one([ingredient])
+        items = self.parse([ingredient])
         return items[0]
@@ -38,7 +38,7 @@ class RecipeDataService(BaseService):
         except Exception as e:
             self.logger.exception(f"Failed to delete recipe data: {e}")

-    def write_image(self, file_data: bytes, extension: str) -> Path:
+    def write_image(self, file_data: bytes | Path, extension: str) -> Path:
         extension = extension.replace(".", "")
         image_path = self.dir_image.joinpath(f"original.{extension}")
         image_path.unlink(missing_ok=True)
@@ -91,8 +91,8 @@ class RecipeDataService(BaseService):
         if ext not in img.IMAGE_EXTENSIONS:
             ext = "jpg"  # Guess the extension

-        filename = str(self.recipe_id) + "." + ext
-        filename = Recipe.directory_from_id(self.recipe_id).joinpath("images", filename)
+        file_name = f"{str(self.recipe_id)}.{ext}"
+        file_path = Recipe.directory_from_id(self.recipe_id).joinpath("images", file_name)

         try:
             r = requests.get(image_url, stream=True, headers={"User-Agent": _FIREFOX_UA})
@@ -102,7 +102,7 @@ class RecipeDataService(BaseService):

         if r.status_code == 200:
             r.raw.decode_content = True
-            self.logger.info(f"File Name Suffix {filename.suffix}")
-            self.write_image(r.raw, filename.suffix)
+            self.logger.info(f"File Name Suffix {file_path.suffix}")
+            self.write_image(r.raw, file_path.suffix)

-        filename.unlink(missing_ok=True)
+        file_path.unlink(missing_ok=True)
@@ -69,7 +69,6 @@ class RecipeService(BaseService):
         all_asset_files = [x.file_name for x in recipe.assets]

         for file in recipe.asset_dir.iterdir():
-            file: Path
             if file.is_dir():
                 continue
             if file.name not in all_asset_files:
@@ -102,13 +101,13 @@ class RecipeService(BaseService):

     def create_one(self, create_data: Union[Recipe, CreateRecipe]) -> Recipe:

-        create_data: Recipe = self._recipe_creation_factory(
+        data: Recipe = self._recipe_creation_factory(
             self.user,
             name=create_data.name,
             additional_attrs=create_data.dict(),
         )

-        create_data.settings = RecipeSettings(
+        data.settings = RecipeSettings(
             public=self.group.preferences.recipe_public,
             show_nutrition=self.group.preferences.recipe_show_nutrition,
             show_assets=self.group.preferences.recipe_show_assets,
@@ -117,7 +116,7 @@ class RecipeService(BaseService):
             disable_amount=self.group.preferences.recipe_disable_amount,
         )

-        return self.repos.recipes.create(create_data)
+        return self.repos.recipes.create(data)

     def create_from_zip(self, archive: UploadFile, temp_path: Path) -> Recipe:
         """
@@ -27,7 +27,7 @@ class TemplateService(BaseService):
         super().__init__()

     @property
-    def templates(self) -> list:
+    def templates(self) -> dict[str, list[str]]:
         """
         Returns a list of all templates available to render.
         """
@@ -78,6 +78,8 @@
         if t_type == TemplateType.zip:
             return self._render_zip(recipe)

+        raise ValueError(f"Template Type '{t_type}' not found.")
+
     def _render_json(self, recipe: Recipe) -> Path:
         """
         Renders a JSON file in a temporary directory and returns
@@ -98,18 +100,18 @@
         """
         self.__check_temp(self._render_jinja2)

-        j2_template: Path = self.directories.TEMPLATE_DIR / j2_template
+        j2_path: Path = self.directories.TEMPLATE_DIR / j2_template

-        if not j2_template.is_file():
-            raise FileNotFoundError(f"Template '{j2_template}' not found.")
+        if not j2_path.is_file():
+            raise FileNotFoundError(f"Template '{j2_path}' not found.")

-        with open(j2_template, "r") as f:
+        with open(j2_path, "r") as f:
             template_text = f.read()

         template = Template(template_text)
         rendered_text = template.render(recipe=recipe.dict(by_alias=True))

-        save_name = f"{recipe.slug}{j2_template.suffix}"
+        save_name = f"{recipe.slug}{j2_path.suffix}"

         save_path = self.temp.joinpath(save_name)
@@ -1,5 +1,5 @@
+from collections.abc import Callable
 from dataclasses import dataclass
-from typing import Callable, Tuple

 from pydantic import BaseModel
@@ -17,7 +17,7 @@ class Cron:

 @dataclass
 class ScheduledFunc(BaseModel):
-    id: Tuple[str, int]
+    id: tuple[str, int]
     name: str
     hour: int
     minutes: int
@@ -1,4 +1,4 @@
-from typing import Callable, Iterable
+from collections.abc import Callable, Iterable

 from mealie.core import root_logger
@@ -49,30 +49,26 @@ class SchedulerService:

     @staticmethod
     def add_cron_job(job_func: ScheduledFunc):
-        SchedulerService.scheduler.add_job(
+        SchedulerService.scheduler.add_job(  # type: ignore
             job_func.callback,
             trigger="cron",
             name=job_func.id,
             hour=job_func.hour,
             minute=job_func.minutes,
-            max_instances=job_func.max_instances,
+            max_instances=job_func.max_instances,  # type: ignore
             replace_existing=job_func.replace_existing,
             args=job_func.args,
         )

-        # SchedulerService._job_store[job_func.id] = job_func
-
     @staticmethod
     def update_cron_job(job_func: ScheduledFunc):
-        SchedulerService.scheduler.reschedule_job(
+        SchedulerService.scheduler.reschedule_job(  # type: ignore
             job_func.id,
             trigger="cron",
             hour=job_func.hour,
             minute=job_func.minutes,
         )

-        # SchedulerService._job_store[job_func.id] = job_func
-

 def _scheduled_task_wrapper(callable):
     try:
@@ -39,7 +39,8 @@ def purge_excess_files() -> None:
     limit = datetime.datetime.now() - datetime.timedelta(minutes=ONE_DAY_AS_MINUTES * 2)

     for file in directories.GROUPS_DIR.glob("**/export/*.zip"):
-        if file.stat().st_mtime < limit:
+        # TODO: fix comparison types
+        if file.stat().st_mtime < limit:  # type: ignore
             file.unlink()
             logger.info(f"excess group file removed '{file}'")
@@ -28,7 +28,7 @@ def post_webhooks(webhook_id: int, session: Session = None):
    if not todays_recipe:
        return

-    payload = json.loads([x.json(by_alias=True) for x in todays_recipe])
+    payload = json.loads([x.json(by_alias=True) for x in todays_recipe])  # type: ignore
    response = requests.post(webhook.url, json=payload)

    if response.status_code != 200:
@@ -2,7 +2,7 @@ import html
 import json
 import re
 from datetime import datetime, timedelta
-from typing import List, Optional
+from typing import Optional

 from slugify import slugify
@@ -33,7 +33,7 @@ def clean(recipe_data: dict, url=None) -> dict:
     recipe_data["recipeIngredient"] = ingredient(recipe_data.get("recipeIngredient"))
     recipe_data["recipeInstructions"] = instructions(recipe_data.get("recipeInstructions"))
     recipe_data["image"] = image(recipe_data.get("image"))
-    recipe_data["slug"] = slugify(recipe_data.get("name"))
+    recipe_data["slug"] = slugify(recipe_data.get("name"))  # type: ignore
     recipe_data["orgURL"] = url

     return recipe_data
@@ -127,7 +127,7 @@ def image(image=None) -> str:
     raise Exception(f"Unrecognised image URL format: {image}")


-def instructions(instructions) -> List[dict]:
+def instructions(instructions) -> list[dict]:
     try:
         instructions = json.loads(instructions)
     except Exception:
@@ -162,7 +162,8 @@ def instructions(instructions) -> List[dict]:
         sectionSteps = []
         for step in instructions:
             if step["@type"] == "HowToSection":
-                [sectionSteps.append(item) for item in step["itemListElement"]]
+                for sectionStep in step["itemListElement"]:
+                    sectionSteps.append(sectionStep)

         if len(sectionSteps) > 0:
             return [{"text": _instruction(step["text"])} for step in sectionSteps if step["@type"] == "HowToStep"]
@@ -183,6 +184,8 @@ def instructions(instructions) -> List[dict]:
     else:
         raise Exception(f"Unrecognised instruction format: {instructions}")

+    return []
+

 def _instruction(line) -> str:
     if isinstance(line, dict):
@@ -199,7 +202,7 @@ def _instruction(line) -> str:
     return clean_line


-def ingredient(ingredients: list) -> str:
+def ingredient(ingredients: list | None) -> list[str]:
     if ingredients:
         return [clean_string(ing) for ing in ingredients]
     else:
@@ -1,5 +1,3 @@
-from typing import Type
-
 from mealie.schema.recipe.recipe import Recipe

 from .scraper_strategies import ABCScraperStrategy, RecipeScraperOpenGraph, RecipeScraperPackage
@@ -11,9 +9,9 @@ class RecipeScraper:
     """

     # List of recipe scrapers. Note that order matters
-    scrapers: list[Type[ABCScraperStrategy]]
+    scrapers: list[type[ABCScraperStrategy]]

-    def __init__(self, scrapers: list[Type[ABCScraperStrategy]] = None) -> None:
+    def __init__(self, scrapers: list[type[ABCScraperStrategy]] = None) -> None:
         if scrapers is None:
             scrapers = [
                 RecipeScraperPackage,
@@ -27,8 +25,8 @@
         Scrapes a recipe from the web.
         """

-        for scraper in self.scrapers:
-            scraper = scraper(url)
+        for scraper_type in self.scrapers:
+            scraper = scraper_type(url)
             recipe = scraper.parse()

             if recipe is not None:
@@ -1,5 +1,5 @@
 from abc import ABC, abstractmethod
-from typing import Any, Callable, Tuple
+from typing import Any, Callable

 import extruct
 import requests
@@ -26,7 +26,7 @@ class ABCScraperStrategy(ABC):
         self.url = url

     @abstractmethod
-    def parse(self, recipe_url: str) -> Recipe | None:
+    def parse(self) -> Recipe | None:
         """Parse a recipe from a web URL.

         Args:
@@ -40,7 +40,7 @@
 class RecipeScraperPackage(ABCScraperStrategy):
     def clean_scraper(self, scraped_data: SchemaScraperFactory.SchemaScraper, url: str) -> Recipe:
-        def try_get_default(func_call: Callable, get_attr: str, default: Any, clean_func=None):
+        def try_get_default(func_call: Callable | None, get_attr: str, default: Any, clean_func=None):
             value = default
             try:
                 value = func_call()
@@ -143,7 +143,7 @@ class RecipeScraperOpenGraph(ABCScraperStrategy):
     def get_html(self) -> str:
         return requests.get(self.url).text

-    def get_recipe_fields(self, html) -> dict:
+    def get_recipe_fields(self, html) -> dict | None:
         """
         Get the recipe fields from the Open Graph data.
         """
@@ -151,7 +151,7 @@
         def og_field(properties: dict, field_name: str) -> str:
             return next((val for name, val in properties if name == field_name), None)

-        def og_fields(properties: list[Tuple[str, str]], field_name: str) -> list[str]:
+        def og_fields(properties: list[tuple[str, str]], field_name: str) -> list[str]:
             return list({val for name, val in properties if name == field_name})

         base_url = get_base_url(html, self.url)
@@ -159,7 +159,7 @@
         try:
             properties = data["opengraph"][0]["properties"]
         except Exception:
-            return
+            return None

         return {
             "name": og_field(properties, "og:title"),
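og_field and og_fields above read the Open Graph data in the shape extruct returns it here: a list of (property, value) pairs under data["opengraph"][0]["properties"]. A self-contained sketch of the same lookups against sample pairs (the sample data is made up for illustration):

sample_properties = [
    ("og:title", "Pasta al Limone"),
    ("og:image", "https://example.com/pasta.jpg"),
    ("og:image", "https://example.com/pasta-alt.jpg"),
]


def og_field(properties: list[tuple[str, str]], field_name: str) -> str | None:
    # First matching value, or None; mirrors the next(...) lookup above.
    return next((val for name, val in properties if name == field_name), None)


def og_fields(properties: list[tuple[str, str]], field_name: str) -> list[str]:
    # All distinct values for a repeated property.
    return list({val for name, val in properties if name == field_name})


print(og_field(sample_properties, "og:title"))
print(og_fields(sample_properties, "og:image"))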
@@ -1,6 +1,7 @@
+from collections.abc import Callable
 from random import getrandbits
 from time import sleep
-from typing import Any, Callable
+from typing import Any

 from fastapi import BackgroundTasks
 from pydantic import UUID4
@@ -16,13 +16,13 @@ class PasswordResetService(BaseService):
         self.db = get_repositories(session)
         super().__init__()

-    def generate_reset_token(self, email: str) -> SavePasswordResetToken:
+    def generate_reset_token(self, email: str) -> SavePasswordResetToken | None:
         user = self.db.users.get_one(email, "email")

         if user is None:
             logger.error(f"failed to create password reset for {email=}: user doesn't exists")
             # Do not raise exception here as we don't want to confirm to the client that the Email doens't exists
-            return
+            return None

         # Create Reset Token
         token = url_safe_token()
@@ -66,7 +66,7 @@ class RegistrationService:
             token_entry = self.repos.group_invite_tokens.get_one(registration.group_token)
             if not token_entry:
                 raise HTTPException(status.HTTP_400_BAD_REQUEST, {"message": "Invalid group token"})
-            group = self.repos.groups.get(token_entry.group_id)
+            group = self.repos.groups.get_one(token_entry.group_id)
         else:
             raise HTTPException(status.HTTP_400_BAD_REQUEST, {"message": "Missing group"})