mirror of
https://github.com/mealie-recipes/mealie.git
synced 2026-02-07 08:23:12 -05:00
chore: drop-apscheduler (#1152)
* rewrite interval timer * drop apscheduler * fix type annotations
This commit is contained in:
@@ -50,19 +50,18 @@ app.add_middleware(GZipMiddleware, minimum_size=1000)
|
||||
register_debug_handler(app)
|
||||
|
||||
|
||||
def start_scheduler():
|
||||
SchedulerService.start()
|
||||
|
||||
async def start_scheduler():
|
||||
SchedulerRegistry.register_daily(
|
||||
tasks.purge_group_registration,
|
||||
tasks.purge_password_reset_tokens,
|
||||
tasks.purge_group_data_exports,
|
||||
)
|
||||
|
||||
SchedulerRegistry.register_hourly()
|
||||
SchedulerRegistry.register_minutely(tasks.update_group_webhooks)
|
||||
SchedulerRegistry.register_minutely(lambda: logger.info("Scheduler tick"))
|
||||
|
||||
logger.info(SchedulerService.scheduler.print_jobs())
|
||||
SchedulerRegistry.print_jobs()
|
||||
|
||||
await SchedulerService.start()
|
||||
|
||||
|
||||
def api_routers():
|
||||
@@ -75,8 +74,8 @@ api_routers()
|
||||
|
||||
|
||||
@app.on_event("startup")
|
||||
def system_startup():
|
||||
start_scheduler()
|
||||
async def system_startup():
|
||||
await start_scheduler()
|
||||
|
||||
logger.info("-----SYSTEM STARTUP----- \n")
|
||||
logger.info("------APP SETTINGS------")
|
||||
|
||||
82
mealie/services/scheduler/runner.py
Normal file
82
mealie/services/scheduler/runner.py
Normal file
@@ -0,0 +1,82 @@
|
||||
# Code Adapted/Copied From fastapi_utils
|
||||
# https://github.com/dmontagu/fastapi-utils/blob/master/fastapi_utils/tasks.py
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from asyncio import ensure_future
|
||||
from functools import wraps
|
||||
from traceback import format_exception
|
||||
from typing import Any, Callable, Coroutine, Optional, Union
|
||||
|
||||
from starlette.concurrency import run_in_threadpool
|
||||
|
||||
NoArgsNoReturnFuncT = Callable[[], None]
NoArgsNoReturnAsyncFuncT = Callable[[], Coroutine[Any, Any, None]]
NoArgsNoReturnDecorator = Callable[[Union[NoArgsNoReturnFuncT, NoArgsNoReturnAsyncFuncT]], NoArgsNoReturnAsyncFuncT]


def repeat_every(
    *,
    minutes: float,
    wait_first: bool = False,
    logger: Optional[logging.Logger] = None,
    raise_exceptions: bool = False,
    max_repetitions: Optional[int] = None,
) -> NoArgsNoReturnDecorator:
    """
    Return a decorator that makes a function periodically re-executed after its first call.

    The function it decorates should accept no arguments and return nothing. If necessary,
    this can be accomplished by using `functools.partial` or otherwise wrapping the target
    function prior to decoration.

    Parameters
    ----------
    minutes: float
        The number of minutes to wait between repeated calls
        (fixed: the original docstring documented a non-existent ``seconds`` parameter)
    wait_first: bool (default False)
        If True, the function will wait for a single period before the first call
    logger: Optional[logging.Logger] (default None)
        The logger to use to log any exceptions raised by calls to the decorated function.
        If not provided, exceptions will not be logged by this function (though they may be
        handled by the event loop).
    raise_exceptions: bool (default False)
        If True, errors raised by the decorated function will be raised to the event loop's
        exception handler. Note that if an error is raised, the repeated execution will stop.
        Otherwise, exceptions are just logged and the execution continues to repeat.
        See https://docs.python.org/3/library/asyncio-eventloop.html#asyncio.loop.set_exception_handler
        for more info.
    max_repetitions: Optional[int] (default None)
        The maximum number of times to call the repeated function. If `None`, the function
        is repeated forever.
    """

    def decorator(func: Union[NoArgsNoReturnAsyncFuncT, NoArgsNoReturnFuncT]) -> NoArgsNoReturnAsyncFuncT:
        """Convert the decorated function into a repeated, periodically-called version of itself."""
        is_coroutine = asyncio.iscoroutinefunction(func)
        interval_seconds = minutes * 60  # hoisted: the sleep interval never changes between iterations

        @wraps(func)
        async def wrapped() -> None:
            repetitions = 0

            async def loop() -> None:
                nonlocal repetitions
                if wait_first:
                    await asyncio.sleep(interval_seconds)
                while max_repetitions is None or repetitions < max_repetitions:
                    try:
                        if is_coroutine:
                            await func()  # type: ignore
                        else:
                            # Sync callables are pushed off the event loop thread.
                            await run_in_threadpool(func)
                        # NOTE: a failed call does not count toward max_repetitions
                        # (preserved from the original implementation).
                        repetitions += 1
                    except Exception as exc:
                        if logger is not None:
                            formatted_exception = "".join(format_exception(type(exc), exc, exc.__traceback__))
                            logger.error(formatted_exception)
                        if raise_exceptions:
                            raise exc
                    await asyncio.sleep(interval_seconds)

            # Fire-and-forget: the repeating loop runs on the current event loop,
            # so awaiting the wrapped function returns almost immediately.
            ensure_future(loop())

        return wrapped

    return decorator
|
||||
@@ -1,21 +1,10 @@
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
@dataclass
class Cron:
    """A simple hour/minute schedule parsed from an ``HH:MM`` string."""

    hours: int
    minutes: int

    @classmethod
    def parse(cls, time_str: str) -> "Cron":
        """Build a Cron from a colon-separated time string, e.g. ``"13:45"``."""
        parts = time_str.split(":")
        # Only the first two fields matter; any trailing seconds are ignored.
        return Cron(hours=int(parts[0]), minutes=int(parts[1]))
|
||||
|
||||
|
||||
@dataclass
|
||||
@dataclass(slots=True)
|
||||
class ScheduledFunc(BaseModel):
|
||||
id: tuple[str, int]
|
||||
name: str
|
||||
@@ -25,4 +14,4 @@ class ScheduledFunc(BaseModel):
|
||||
|
||||
max_instances: int = 1
|
||||
replace_existing: bool = True
|
||||
args: list = []
|
||||
args: list = field(default_factory=list)
|
||||
|
||||
@@ -46,3 +46,14 @@ class SchedulerRegistry:
|
||||
def remove_minutely(callback: Callable):
|
||||
logger.info(f"Removing minutely callback: {callback.__name__}")
|
||||
SchedulerRegistry._minutely.remove(callback)
|
||||
|
||||
@staticmethod
|
||||
def print_jobs():
|
||||
for job in SchedulerRegistry._daily:
|
||||
logger.info(f"Daily job: {job.__name__}")
|
||||
|
||||
for job in SchedulerRegistry._hourly:
|
||||
logger.info(f"Hourly job: {job.__name__}")
|
||||
|
||||
for job in SchedulerRegistry._minutely:
|
||||
logger.info(f"Minutely job: {job.__name__}")
|
||||
|
||||
@@ -1,73 +1,25 @@
|
||||
from pathlib import Path
|
||||
|
||||
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
|
||||
from mealie.core import root_logger
|
||||
from mealie.services.scheduler.runner import repeat_every
|
||||
|
||||
from .scheduled_func import ScheduledFunc
|
||||
from .scheduler_registry import SchedulerRegistry
|
||||
|
||||
logger = root_logger.get_logger()
|
||||
|
||||
CWD = Path(__file__).parent
|
||||
|
||||
SCHEDULER_DB = CWD / ".scheduler.db"
|
||||
SCHEDULER_DATABASE = f"sqlite:///{SCHEDULER_DB}"
|
||||
|
||||
MINUTES_DAY = 1440
|
||||
MINUTES_15 = 15
|
||||
MINUTES_5 = 5
|
||||
MINUTES_HOUR = 60
|
||||
|
||||
|
||||
class SchedulerService:
|
||||
"""
|
||||
SchedulerService is a wrapper class around the APScheduler library. It is responsible for interacting with the scheduler
|
||||
and scheduling events. This includes the interval events that are registered in the SchedulerRegistry as well as cron events
|
||||
that are used for sending webhooks. In most cases, unless the schedule is dynamic, events should be registered with the
|
||||
SchedulerRegistry. See app.py for examples.
|
||||
"""
|
||||
|
||||
_scheduler: BackgroundScheduler
|
||||
|
||||
@staticmethod
|
||||
def start():
|
||||
# Preclean
|
||||
SCHEDULER_DB.unlink(missing_ok=True)
|
||||
|
||||
# Register Interval Jobs and Start Scheduler
|
||||
SchedulerService._scheduler = BackgroundScheduler(jobstores={"default": SQLAlchemyJobStore(SCHEDULER_DATABASE)})
|
||||
SchedulerService._scheduler.add_job(run_daily, "interval", minutes=MINUTES_DAY, id="Daily Interval Jobs")
|
||||
SchedulerService._scheduler.add_job(run_hourly, "interval", minutes=MINUTES_HOUR, id="Hourly Interval Jobs")
|
||||
SchedulerService._scheduler.add_job(run_minutely, "interval", minutes=MINUTES_15, id="Regular Interval Jobs")
|
||||
SchedulerService._scheduler.start()
|
||||
|
||||
@classmethod
|
||||
@property
|
||||
def scheduler(cls) -> BackgroundScheduler:
|
||||
return SchedulerService._scheduler
|
||||
|
||||
@staticmethod
|
||||
def add_cron_job(job_func: ScheduledFunc):
|
||||
SchedulerService.scheduler.add_job( # type: ignore
|
||||
job_func.callback,
|
||||
trigger="cron",
|
||||
name=job_func.id,
|
||||
hour=job_func.hour,
|
||||
minute=job_func.minutes,
|
||||
max_instances=job_func.max_instances, # type: ignore
|
||||
replace_existing=job_func.replace_existing,
|
||||
args=job_func.args,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def update_cron_job(job_func: ScheduledFunc):
|
||||
SchedulerService.scheduler.reschedule_job( # type: ignore
|
||||
job_func.id,
|
||||
trigger="cron",
|
||||
hour=job_func.hour,
|
||||
minute=job_func.minutes,
|
||||
)
|
||||
    async def start():
        """Start the scheduler by launching each interval runner.

        Each ``run_*`` coroutine is decorated with ``repeat_every`` and
        schedules its own background loop, so these awaits return quickly
        rather than blocking for a full interval.
        """
        await run_minutely()
        await run_daily()
        await run_hourly()
|
||||
|
||||
|
||||
def _scheduled_task_wrapper(callable):
|
||||
@@ -77,18 +29,21 @@ def _scheduled_task_wrapper(callable):
|
||||
logger.error(f"Error in scheduled task func='{callable.__name__}': exception='{e}'")
|
||||
|
||||
|
||||
@repeat_every(minutes=MINUTES_DAY, wait_first=True, logger=logger)
def run_daily():
    """Invoke every registered daily callback; failures are isolated per callback."""
    logger.info("Running daily callbacks")
    for callback in SchedulerRegistry._daily:
        _scheduled_task_wrapper(callback)
|
||||
|
||||
|
||||
@repeat_every(minutes=MINUTES_HOUR, wait_first=True, logger=logger)
def run_hourly():
    """Invoke every registered hourly callback; failures are isolated per callback."""
    logger.info("Running hourly callbacks")
    for callback in SchedulerRegistry._hourly:
        _scheduled_task_wrapper(callback)
|
||||
|
||||
|
||||
@repeat_every(minutes=MINUTES_5, wait_first=True, logger=logger)
|
||||
def run_minutely():
|
||||
logger.info("Running minutely callbacks")
|
||||
for func in SchedulerRegistry._minutely:
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
from .purge_group_exports import *
|
||||
from .purge_password_reset import *
|
||||
from .purge_registration import *
|
||||
from .webhooks import *
|
||||
|
||||
"""
|
||||
Tasks Package
|
||||
|
||||
@@ -1,58 +0,0 @@
|
||||
import json
|
||||
|
||||
import requests
|
||||
from sqlalchemy.orm.session import Session
|
||||
|
||||
from mealie.core import root_logger
|
||||
from mealie.db.db_setup import create_session
|
||||
from mealie.repos.all_repositories import get_repositories
|
||||
from mealie.schema.group.webhook import ReadWebhook
|
||||
|
||||
from ..scheduled_func import Cron, ScheduledFunc
|
||||
from ..scheduler_service import SchedulerService
|
||||
|
||||
logger = root_logger.get_logger()
|
||||
|
||||
|
||||
def post_webhooks(webhook_id: int, session: Session = None):
    """POST today's meal-plan recipes to the webhook identified by ``webhook_id``.

    Looks up the webhook, skips it when disabled, and sends the day's recipes
    (if any) to the webhook URL as a JSON array. The session is always closed
    on exit — the original leaked it on the two early-return paths.
    """
    session = session or create_session()
    try:
        db = get_repositories(session)
        webhook: ReadWebhook = db.webhooks.get_one(webhook_id)

        if not webhook.enabled:
            logger.info(f"Skipping webhook {webhook_id}. reasons: is disabled")
            return

        todays_recipe = db.meals.get_today(webhook.group_id)

        if not todays_recipe:
            return

        # Bug fix: json.loads() was being handed a *list* of JSON strings, which
        # raises TypeError at runtime. Decode each recipe's JSON individually so
        # the payload is a plain list of dicts.
        payload = [json.loads(recipe.json(by_alias=True)) for recipe in todays_recipe]
        response = requests.post(webhook.url, json=payload)

        if response.status_code != 200:
            logger.error(f"Error posting webhook to {webhook.url} ({response.status_code})")
    finally:
        # Matches the original contract of closing even a caller-supplied session.
        session.close()
|
||||
|
||||
|
||||
def update_group_webhooks():
    """Register a cron job for every webhook currently stored in the database.

    Reads all webhooks, parses each one's ``HH:MM`` trigger time, and hands a
    ``ScheduledFunc`` to the scheduler. The session is closed when done (the
    original never closed it).
    """
    session = create_session()
    try:
        db = get_repositories(session)

        webhooks: list[ReadWebhook] = db.webhooks.get_all()

        for webhook in webhooks:
            cron = Cron.parse(webhook.time)

            job_func = ScheduledFunc(
                id=webhook.id,
                name=f"Group {webhook.group_id} webhook",
                callback=post_webhooks,
                hour=cron.hours,
                minute=cron.minutes,
                # Bug fix: "(webhook.id)" is not a tuple — parentheses without a
                # trailing comma are just grouping, so a bare id was passed where
                # ScheduledFunc declares ``args: list``.
                args=[webhook.id],
            )

            SchedulerService.add_cron_job(job_func)
    finally:
        session.close()
|
||||
Reference in New Issue
Block a user