Merge branch 'mealie-next' into fix/translation-issues-when-scraping

This commit is contained in:
Michael Genson
2024-02-04 13:20:44 -06:00
committed by GitHub
183 changed files with 4530 additions and 2172 deletions

View File

@@ -2,10 +2,11 @@ import datetime
import uuid
from os import path
from pathlib import Path
from typing import Any
from fastapi.encoders import jsonable_encoder
from pydantic import BaseModel
from sqlalchemy import ForeignKeyConstraint, MetaData, create_engine, insert, text
from sqlalchemy import ForeignKey, ForeignKeyConstraint, MetaData, Table, create_engine, insert, text
from sqlalchemy.engine import base
from sqlalchemy.orm import sessionmaker
@@ -41,13 +42,27 @@ class AlchemyExporter(BaseService):
self.session_maker = sessionmaker(bind=self.engine)
@staticmethod
def is_uuid(value: str) -> bool:
def is_uuid(value: Any) -> bool:
try:
uuid.UUID(value)
return True
except ValueError:
return False
@staticmethod
def is_valid_foreign_key(db_dump: dict[str, list[dict]], fk: ForeignKey, fk_value: Any) -> bool:
    """Return True when `fk_value` satisfies the foreign key `fk` against `db_dump`.

    A falsy value (e.g. a null FK) is always considered valid; otherwise the
    referenced table in the dump must contain a row carrying that value.
    """
    if not fk_value:
        return True

    target_table = fk.column.table.name
    target_column = fk.column.name
    # valid iff some row of the referenced table holds this value
    return any(candidate[target_column] == fk_value for candidate in db_dump.get(target_table, []))
def convert_types(self, data: dict) -> dict:
"""
walks the dictionary to restore all things that look like string representations of their complex types
@@ -70,6 +85,33 @@ class AlchemyExporter(BaseService):
data[key] = self.DateTimeParser(time=value).time
return data
def clean_rows(self, db_dump: dict[str, list[dict]], table: Table, rows: list[dict]) -> list[dict]:
    """
    Checks rows against foreign key constraints and removes any rows that would violate them.

    :param db_dump: full database dump keyed by table name, used to resolve referenced rows
    :param table: the SQLAlchemy table the candidate rows belong to
    :param rows: candidate rows to insert into `table`
    :return: the subset of `rows` whose foreign key values all resolve
    """

    fks = table.foreign_keys
    valid_rows = []
    for row in rows:
        is_valid_row = True
        for fk in fks:
            # look the FK value up once and reuse it for both the check and the log
            # (the original recomputed row.get(...) in the call below)
            fk_value = row.get(fk.parent.name)
            if self.is_valid_foreign_key(db_dump, fk, fk_value):
                continue

            is_valid_row = False
            self.logger.warning(
                f"Removing row from table {table.name} because of invalid foreign key {fk.parent.name}: {fk_value}"
            )
            self.logger.warning(f"Row: {row}")
            break

        if is_valid_row:
            valid_rows.append(row)

    return valid_rows
def dump_schema(self) -> dict:
"""
Returns the schema of the SQLAlchemy database as a python dictionary. This dictionary is wrapped by
@@ -125,6 +167,7 @@ class AlchemyExporter(BaseService):
if not rows:
continue
table = self.meta.tables[table_name]
rows = self.clean_rows(db_dump, table, rows)
connection.execute(table.delete())
connection.execute(insert(table), rows)

View File

@@ -9,8 +9,7 @@ from mealie.services.backups_v2.alchemy_exporter import AlchemyExporter
from mealie.services.backups_v2.backup_file import BackupFile
# NOTE(review): looks like this signals a schema/version mismatch during backup
# restore — confirm at raise sites. The duplicated stub definition (old
# three-line and new one-line forms both present) is collapsed to one.
class BackupSchemaMismatch(Exception): ...
class BackupV2(BaseService):
@@ -69,7 +68,7 @@ class BackupV2(BaseService):
shutil.copytree(f, self.directories.DATA_DIR / f.name)
def restore(self, backup_path: Path) -> None:
self.logger.info("initially backup restore")
self.logger.info("initializing backup restore")
backup = BackupFile(backup_path)

View File

@@ -3,6 +3,10 @@ import typing
from abc import ABC, abstractmethod
from dataclasses import dataclass
from email import message
from email.utils import formatdate
from uuid import uuid4
from html2text import html2text
from mealie.services._base_service import BaseService
@@ -36,8 +40,20 @@ class Message:
msg["Subject"] = self.subject
msg["From"] = f"{self.mail_from_name} <{self.mail_from_address}>"
msg["To"] = to
msg["Date"] = formatdate(localtime=True)
msg.add_alternative(html2text(self.html), subtype="plain")
msg.add_alternative(self.html, subtype="html")
try:
message_id = f"<{uuid4()}@{self.mail_from_address.split('@')[1]}>"
except IndexError:
# this should never happen with a valid email address,
# but we let the SMTP server handle it instead of raising it here
message_id = f"<{uuid4()}@{self.mail_from_address}>"
msg["Message-ID"] = message_id
msg["MIME-Version"] = "1.0"
if smtp.ssl:
with smtplib.SMTP_SSL(smtp.host, smtp.port) as server:
if smtp.username and smtp.password:
@@ -57,8 +73,7 @@ class Message:
class ABCEmailSender(ABC):
@abstractmethod
def send(self, email_to: str, subject: str, html: str) -> bool:
...
def send(self, email_to: str, subject: str, html: str) -> bool: ...
class DefaultEmailSender(ABCEmailSender, BaseService):

View File

@@ -100,9 +100,12 @@ class AppriseEventListener(EventListenerBase):
return [
# We use query params to add custom key: value pairs to the Apprise payload by prepending the key with ":".
AppriseEventListener.merge_query_parameters(url, {f":{k}": v for k, v in params.items()})
# only certain endpoints support the custom key: value pairs, so we only apply them to those endpoints
if AppriseEventListener.is_custom_url(url) else url
(
AppriseEventListener.merge_query_parameters(url, {f":{k}": v for k, v in params.items()})
# only certain endpoints support the custom key: value pairs, so we only apply them to those endpoints
if AppriseEventListener.is_custom_url(url)
else url
)
for url in urls
]

View File

@@ -8,8 +8,7 @@ from mealie.services.event_bus_service.event_types import Event
class PublisherLike(Protocol):
    # Structural interface: anything exposing a matching publish() qualifies.
    # The duplicated stub (old three-line and new one-line forms) is collapsed.
    def publish(self, event: Event, notification_urls: list[str]): ...
class ApprisePublisher:

View File

@@ -37,12 +37,10 @@ class ABCExporter(BaseService):
super().__init__()
@abstractproperty
def destination_dir(self) -> str:
...
def destination_dir(self) -> str: ...
@abstractmethod
def items(self) -> Iterator[ExportedItem]:
...
def items(self) -> Iterator[ExportedItem]: ...
def _post_export_hook(self, _: BaseModel) -> None:
pass

View File

@@ -39,7 +39,7 @@ class MealieAlphaMigrator(BaseMigrator):
with contextlib.suppress(KeyError):
if "" in recipe["categories"]:
recipe["categories"] = [cat for cat in recipe["categories"] if cat != ""]
if type(recipe["extras"]) == list:
if isinstance(recipe["extras"], list):
recipe["extras"] = {}
recipe["comments"] = []

View File

@@ -23,8 +23,7 @@ def plantoeat_recipes(file: Path):
for name in Path(tmpdir).glob("**/[!.]*.csv"):
with open(name, newline="") as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
yield row
yield from reader
def get_value_as_string_or_none(dictionary: dict, key: str):

View File

@@ -187,7 +187,7 @@ def import_data(lines):
token = unclump(token)
# turn B-NAME/123 back into "name"
tag, confidence = re.split(r"/", columns[-1], 1)
tag, confidence = re.split(r"/", columns[-1], maxsplit=1)
tag = re.sub(r"^[BI]\-", "", tag).lower() # noqa: W605 - invalid dscape sequence
# ====================

View File

@@ -106,12 +106,10 @@ class ABCIngredientParser(ABC):
return 70
@abstractmethod
def parse_one(self, ingredient_string: str) -> ParsedIngredient:
...
def parse_one(self, ingredient_string: str) -> ParsedIngredient: ...
@abstractmethod
def parse(self, ingredients: list[str]) -> list[ParsedIngredient]:
...
def parse(self, ingredients: list[str]) -> list[ParsedIngredient]: ...
@classmethod
def find_match(cls, match_value: str, *, store_map: dict[str, T], fuzzy_match_threshold: int = 0) -> T | None:

View File

@@ -98,7 +98,7 @@ class RecipeBulkScraperService(BaseService):
tasks = [_do(b.url) for b in urls.imports]
results = await asyncio.gather(*tasks, return_exceptions=True)
for b, recipe in zip(urls.imports, results, strict=True):
if not recipe or isinstance(recipe, Exception):
if not recipe or isinstance(recipe, BaseException):
continue
if b.tags:

View File

@@ -84,8 +84,7 @@ class ABCScraperStrategy(ABC):
self.translator = translator
@abstractmethod
async def get_html(self, url: str) -> str:
...
async def get_html(self, url: str) -> str: ...
@abstractmethod
async def parse(self) -> tuple[Recipe, ScrapedExtras] | tuple[None, None]: