chore: update linters (#2095)

* update deps

* ruff auto-fixes

* refactor match statements where possible
Author: Hayden
Date: 2023-02-05 09:51:44 -09:00
Committer: GitHub
parent d5efaad2c3
commit f3a26f864d
11 changed files with 81 additions and 104 deletions

@@ -130,20 +130,15 @@ class WebhookEventListener(EventListenerBase):
         return scheduled_webhooks

     def publish_to_subscribers(self, event: Event, subscribers: list[ReadWebhook]) -> None:
-        match event.document_data.document_type:  # noqa - match statement not supported by ruff
-            case EventDocumentType.mealplan:
-                # TODO: limit mealplan data to a date range instead of returning all mealplans
-                meal_repo = self.repos.meals.by_group(self.group_id)
-                meal_pagination_data = meal_repo.page_all(pagination=PaginationQuery(page=1, per_page=-1))
-                meal_data = meal_pagination_data.items
-                if meal_data:
-                    webhook_data = cast(EventWebhookData, event.document_data)
-                    webhook_data.webhook_body = meal_data
-                    self.publisher.publish(event, [webhook.url for webhook in subscribers])
-            case _:
-                # if the document type is not supported, do nothing
-                pass
+        if event.document_data.document_type == EventDocumentType.mealplan:
+            # TODO: limit mealplan data to a date range instead of returning all mealplans
+            meal_repo = self.repos.meals.by_group(self.group_id)
+            meal_pagination_data = meal_repo.page_all(pagination=PaginationQuery(page=1, per_page=-1))
+            meal_data = meal_pagination_data.items
+            if meal_data:
+                webhook_data = cast(EventWebhookData, event.document_data)
+                webhook_data.webhook_body = meal_data
+                self.publisher.publish(event, [webhook.url for webhook in subscribers])

     def get_scheduled_webhooks(self, start_dt: datetime, end_dt: datetime) -> list[ReadWebhook]:
         """Fetches all scheduled webhooks from the database"""

@@ -1,9 +1,8 @@
 import zipfile
 from abc import abstractmethod, abstractproperty
-from collections.abc import Iterator
+from collections.abc import Callable, Iterator
 from dataclasses import dataclass
 from pathlib import Path
-from typing import Callable
 from uuid import UUID

 from pydantic import BaseModel
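
The import churn here (and in the next two files) follows PEP 585: since Python 3.9, `typing.Callable` is a deprecated alias for `collections.abc.Callable`, which ruff's pyupgrade rules flag. A minimal sketch of the modern spelling (the function is illustrative):

```python
from collections.abc import Callable, Iterator


def apply_all(fns: Iterator[Callable[[int], int]], value: int) -> int:
    # collections.abc.Callable is subscriptable since Python 3.9 (PEP 585).
    for fn in fns:
        value = fn(value)
    return value


print(apply_all(iter([lambda x: x + 1, lambda x: x * 2]), 3))  # prints 8
```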

@@ -4,9 +4,10 @@
 import asyncio
 import logging
 from asyncio import ensure_future
+from collections.abc import Callable, Coroutine
 from functools import wraps
 from traceback import format_exception
-from typing import Any, Callable, Coroutine
+from typing import Any

 from starlette.concurrency import run_in_threadpool
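
Same migration in this file: `Callable` and `Coroutine` move to `collections.abc` while `Any` stays in `typing`. Judging by the surrounding imports (`wraps`, `ensure_future`, `run_in_threadpool`), the module wraps coroutine functions; a hedged sketch of how those two names type such a decorator (the decorator itself is illustrative, not this file's code):

```python
import asyncio
from collections.abc import Callable, Coroutine
from functools import wraps
from typing import Any

AsyncFunc = Callable[..., Coroutine[Any, Any, None]]


def swallow_exceptions(func: AsyncFunc) -> AsyncFunc:
    """Wrap a coroutine function so failures are reported instead of raised."""

    @wraps(func)
    async def wrapper(*args: Any, **kwargs: Any) -> None:
        try:
            await func(*args, **kwargs)
        except Exception as exc:
            print(f"background task failed: {exc!r}")

    return wrapper


@swallow_exceptions
async def boom() -> None:
    raise RuntimeError("oops")


asyncio.run(boom())  # prints: background task failed: RuntimeError('oops')
```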

@@ -1,6 +1,7 @@
 import time
 from abc import ABC, abstractmethod
-from typing import Any, Callable
+from collections.abc import Callable
+from typing import Any

 import extruct
 from fastapi import HTTPException, status
@@ -32,7 +33,6 @@ async def safe_scrape_html(url: str) -> str:
     async with AsyncClient() as client:
         html_bytes = b""
         async with client.stream("GET", url, timeout=SCRAPER_TIMEOUT, headers={"User-Agent": _FIREFOX_UA}) as resp:
-            start_time = time.time()
             async for chunk in resp.aiter_bytes(chunk_size=1024):
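
The deleted `start_time` assignment suggests the old scraper kept a manual wall-clock deadline while streaming. For context, a sketch of that streaming pattern with httpx; the deadline check and the `SCRAPER_TIMEOUT` value are assumptions, not this commit's code:

```python
import time

from httpx import AsyncClient

SCRAPER_TIMEOUT = 15  # seconds; assumed value, the real constant lives elsewhere in the module


async def fetch_html(url: str) -> str:
    html_bytes = b""
    async with AsyncClient() as client:
        # Stream the body in chunks instead of buffering the whole response.
        async with client.stream("GET", url, timeout=SCRAPER_TIMEOUT) as resp:
            start_time = time.time()
            async for chunk in resp.aiter_bytes(chunk_size=1024):
                html_bytes += chunk
                # Manual deadline: httpx's timeout bounds each read, not the
                # total elapsed time of a slow-but-steady response.
                if time.time() - start_time > SCRAPER_TIMEOUT:
                    raise TimeoutError(f"scrape of {url} exceeded {SCRAPER_TIMEOUT}s")
    return html_bytes.decode("utf-8", errors="ignore")
```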