chore: update linters (#2095)
* update deps
* ruff auto-fixes
* refactor match statements where possible
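Most of the ruff auto-fixes below are import modernization: Callable, Iterable, and Coroutine move from typing to collections.abc (the typing aliases are deprecated), and Optional[X] annotations become X | None (PEP 604). A minimal before/after sketch of the pattern, not taken from any file in this diff:

# before: deprecated typing aliases that ruff's pyupgrade rules flag
from typing import Callable, Optional

def apply_old(fn: Callable[[int], int], value: Optional[int]) -> Optional[int]:
    return fn(value) if value is not None else None

# after: collections.abc import plus a PEP 604 union (Python 3.10+)
from collections.abc import Callable

def apply_new(fn: Callable[[int], int], value: int | None) -> int | None:
    return fn(value) if value is not None else None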
@@ -1,7 +1,8 @@
 from __future__ import annotations
 
+from collections.abc import Iterable
 from math import ceil
-from typing import Any, Generic, Iterable, TypeVar
+from typing import Any, Generic, TypeVar
 
 from fastapi import HTTPException
 from pydantic import UUID4, BaseModel
@@ -43,18 +43,18 @@ class GroupMigrationController(BaseUserController):
             "add_migration_tag": add_migration_tag,
         }
 
-        migrator: BaseMigrator
+        table: dict[SupportedMigrations, type[BaseMigrator]] = {
+            SupportedMigrations.chowdown: ChowdownMigrator,
+            SupportedMigrations.mealie_alpha: MealieAlphaMigrator,
+            SupportedMigrations.nextcloud: NextcloudMigrator,
+            SupportedMigrations.paprika: PaprikaMigrator,
+        }
 
-        match migration_type:  # noqa match not supported by ruff
-            case SupportedMigrations.chowdown:
-                migrator = ChowdownMigrator(**args)
-            case SupportedMigrations.mealie_alpha:
-                migrator = MealieAlphaMigrator(**args)
-            case SupportedMigrations.nextcloud:
-                migrator = NextcloudMigrator(**args)
-            case SupportedMigrations.paprika:
-                migrator = PaprikaMigrator(**args)
-            case _:
-                raise ValueError(f"Unsupported migration type: {migration_type}")
+        constructor = table.get(migration_type, None)
+
+        if constructor is None:
+            raise ValueError(f"Unsupported migration type: {migration_type}")
+
+        migrator = constructor(**args)
 
         return migrator.migrate(f"{migration_type.value.title()} Migration")
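The match statement above carried a noqa because ruff's parser did not support match at the time; the replacement is a plain dict dispatch, which also keeps the constructors and the error path in one place. A small sketch of the same pattern with hypothetical names (Fmt, load_json, load_csv are illustrative only, not mealie code):

from enum import Enum

class Fmt(Enum):
    json = "json"
    csv = "csv"

def load_json(raw: str) -> list:
    return [raw]  # placeholder body

def load_csv(raw: str) -> list:
    return raw.split(",")  # placeholder body

# map each enum member to its handler, then look up once
TABLE = {Fmt.json: load_json, Fmt.csv: load_csv}

def load(fmt: Fmt, raw: str) -> list:
    handler = TABLE.get(fmt, None)
    if handler is None:
        raise ValueError(f"Unsupported format: {fmt}")
    return handler(raw)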
@@ -1,5 +1,5 @@
+from collections.abc import Callable
 from functools import cached_property
-from typing import Callable
 
 from fastapi import APIRouter, Depends, Query
 from pydantic import UUID4
@@ -1,6 +1,5 @@
 from functools import cached_property
 from shutil import copyfileobj
-from typing import Optional
 from zipfile import ZipFile
 
 import orjson
@@ -28,7 +27,6 @@ from mealie.schema.recipe import Recipe, RecipeImageTypes, ScrapeRecipe
 from mealie.schema.recipe.recipe import (
     CreateRecipe,
     CreateRecipeByUrlBulk,
     RecipePagination,
     RecipePaginationQuery,
     RecipeSummary,
-    RecipeSummaryWithIngredients,
@@ -148,23 +146,23 @@ router = UserAPIRouter(prefix="/recipes", tags=["Recipe: CRUD"], route_class=Mea
|
||||
@controller(router)
|
||||
class RecipeController(BaseRecipeController):
|
||||
def handle_exceptions(self, ex: Exception) -> None:
|
||||
match type(ex): # noqa match statement not supported
|
||||
case exceptions.PermissionDenied:
|
||||
self.logger.error("Permission Denied on recipe controller action")
|
||||
raise HTTPException(status_code=403, detail=ErrorResponse.respond(message="Permission Denied"))
|
||||
case exceptions.NoEntryFound:
|
||||
self.logger.error("No Entry Found on recipe controller action")
|
||||
raise HTTPException(status_code=404, detail=ErrorResponse.respond(message="No Entry Found"))
|
||||
case sqlalchemy.exc.IntegrityError:
|
||||
self.logger.error("SQL Integrity Error on recipe controller action")
|
||||
raise HTTPException(status_code=400, detail=ErrorResponse.respond(message="Recipe already exists"))
|
||||
thrownType = type(ex)
|
||||
|
||||
case _:
|
||||
self.logger.error("Unknown Error on recipe controller action")
|
||||
self.logger.exception(ex)
|
||||
raise HTTPException(
|
||||
status_code=500, detail=ErrorResponse.respond(message="Unknown Error", exception=str(ex))
|
||||
)
|
||||
if thrownType == exceptions.PermissionDenied:
|
||||
self.logger.error("Permission Denied on recipe controller action")
|
||||
raise HTTPException(status_code=403, detail=ErrorResponse.respond(message="Permission Denied"))
|
||||
elif thrownType == exceptions.NoEntryFound:
|
||||
self.logger.error("No Entry Found on recipe controller action")
|
||||
raise HTTPException(status_code=404, detail=ErrorResponse.respond(message="No Entry Found"))
|
||||
elif thrownType == sqlalchemy.exc.IntegrityError:
|
||||
self.logger.error("SQL Integrity Error on recipe controller action")
|
||||
raise HTTPException(status_code=400, detail=ErrorResponse.respond(message="Recipe already exists"))
|
||||
else:
|
||||
self.logger.error("Unknown Error on recipe controller action")
|
||||
self.logger.exception(ex)
|
||||
raise HTTPException(
|
||||
status_code=500, detail=ErrorResponse.respond(message="Unknown Error", exception=ex.__class__.__name__)
|
||||
)
|
||||
|
||||
# =======================================================================
|
||||
# URL Scraping Operations
|
||||
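Note that both versions compare the exact class: a dotted name in a case arm is a value pattern, so match type(ex) tested type(ex) == exceptions.PermissionDenied, and the new chain preserves that with ==. Subclasses of these exceptions therefore still land in the 500 branch. A hedged sketch of the difference, using builtin exceptions rather than mealie's:

class NotOwner(PermissionError):
    pass

ex = NotOwner("nope")

# exact-type comparison, as in the refactored handler: a subclass does NOT match
assert type(ex) != PermissionError

# isinstance would match the subclass too; a behavior change, not what this commit does
assert isinstance(ex, PermissionError)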
@@ -245,12 +243,12 @@ class RecipeController(BaseRecipeController):
         self,
         request: Request,
         q: RecipePaginationQuery = Depends(),
-        cookbook: Optional[UUID4 | str] = Query(None),
-        categories: Optional[list[UUID4 | str]] = Query(None),
-        tags: Optional[list[UUID4 | str]] = Query(None),
-        tools: Optional[list[UUID4 | str]] = Query(None),
+        cookbook: UUID4 | str | None = Query(None),
+        categories: list[UUID4 | str] | None = Query(None),
+        tags: list[UUID4 | str] | None = Query(None),
+        tools: list[UUID4 | str] | None = Query(None),
     ):
-        cookbook_data: Optional[ReadCookBook] = None
+        cookbook_data: ReadCookBook | None = None
         if cookbook:
             cb_match_attr = "slug" if isinstance(cookbook, str) else "id"
             cookbook_data = self.cookbooks_repo.get_one(cookbook, cb_match_attr)
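FastAPI inspects these annotations at runtime, so the X | None spelling requires Python 3.10+, where | on classes builds a real union object; given that, the two spellings are interchangeable. A quick check, assuming Python 3.10+ (plain uuid.UUID stands in for pydantic's UUID4 to keep the snippet self-contained):

import uuid
from typing import Optional

# PEP 604 unions compare equal to the typing.Optional form on 3.10+
assert (uuid.UUID | str | None) == Optional[uuid.UUID | str]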
@@ -130,20 +130,15 @@ class WebhookEventListener(EventListenerBase):
         return scheduled_webhooks
 
     def publish_to_subscribers(self, event: Event, subscribers: list[ReadWebhook]) -> None:
-        match event.document_data.document_type:  # noqa - match statement not supported by ruff
-            case EventDocumentType.mealplan:
-                # TODO: limit mealplan data to a date range instead of returning all mealplans
-                meal_repo = self.repos.meals.by_group(self.group_id)
-                meal_pagination_data = meal_repo.page_all(pagination=PaginationQuery(page=1, per_page=-1))
-                meal_data = meal_pagination_data.items
-                if meal_data:
-                    webhook_data = cast(EventWebhookData, event.document_data)
-                    webhook_data.webhook_body = meal_data
-                    self.publisher.publish(event, [webhook.url for webhook in subscribers])
-
-            case _:
-                # if the document type is not supported, do nothing
-                pass
+        if event.document_data.document_type == EventDocumentType.mealplan:
+            # TODO: limit mealplan data to a date range instead of returning all mealplans
+            meal_repo = self.repos.meals.by_group(self.group_id)
+            meal_pagination_data = meal_repo.page_all(pagination=PaginationQuery(page=1, per_page=-1))
+            meal_data = meal_pagination_data.items
+            if meal_data:
+                webhook_data = cast(EventWebhookData, event.document_data)
+                webhook_data.webhook_body = meal_data
+                self.publisher.publish(event, [webhook.url for webhook in subscribers])
 
     def get_scheduled_webhooks(self, start_dt: datetime, end_dt: datetime) -> list[ReadWebhook]:
         """Fetches all scheduled webhooks from the database"""
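With only one supported document type, the match arm plus an explicit case _: pass collapses into a single if that simply falls through for everything else. A generic sketch of that guard shape (DocType and publish are stand-ins, not mealie names):

from enum import Enum, auto

class DocType(Enum):  # stand-in enum, not mealie's EventDocumentType
    mealplan = auto()
    category = auto()

def publish(doc_type: DocType) -> str | None:
    # unsupported types fall through and return None,
    # replacing the old explicit `case _: pass` arm
    if doc_type == DocType.mealplan:
        return "published"
    return None

assert publish(DocType.mealplan) == "published"
assert publish(DocType.category) is None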
@@ -1,9 +1,8 @@
 import zipfile
 from abc import abstractmethod, abstractproperty
-from collections.abc import Iterator
+from collections.abc import Callable, Iterator
 from dataclasses import dataclass
 from pathlib import Path
-from typing import Callable
 from uuid import UUID
 
 from pydantic import BaseModel
@@ -4,9 +4,10 @@
 import asyncio
 import logging
 from asyncio import ensure_future
+from collections.abc import Callable, Coroutine
 from functools import wraps
 from traceback import format_exception
-from typing import Any, Callable, Coroutine
+from typing import Any
 
 from starlette.concurrency import run_in_threadpool
 
@@ -1,6 +1,7 @@
 import time
 from abc import ABC, abstractmethod
-from typing import Any, Callable
+from collections.abc import Callable
+from typing import Any
 
 import extruct
 from fastapi import HTTPException, status
@@ -32,7 +33,6 @@ async def safe_scrape_html(url: str) -> str:
|
||||
async with AsyncClient() as client:
|
||||
html_bytes = b""
|
||||
async with client.stream("GET", url, timeout=SCRAPER_TIMEOUT, headers={"User-Agent": _FIREFOX_UA}) as resp:
|
||||
|
||||
start_time = time.time()
|
||||
|
||||
async for chunk in resp.aiter_bytes(chunk_size=1024):
|
||||
|
||||
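For context, safe_scrape_html streams the page in 1 KiB chunks and records start_time so it can stop early instead of downloading without bound; the hunk above only drops a stray blank line inside that block. A minimal sketch of the streaming pattern with httpx (the bail-out condition and the SCRAPER_TIMEOUT value are assumptions, not mealie's exact logic):

import time

from httpx import AsyncClient

SCRAPER_TIMEOUT = 15  # seconds; assumed value

async def fetch_capped(url: str) -> bytes:
    html_bytes = b""
    async with AsyncClient() as client:
        async with client.stream("GET", url, timeout=SCRAPER_TIMEOUT) as resp:
            start_time = time.time()
            async for chunk in resp.aiter_bytes(chunk_size=1024):
                html_bytes += chunk
                # stop reading once the wall-clock budget is spent
                if time.time() - start_time > SCRAPER_TIMEOUT:
                    break
    return html_bytes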