mirror of
https://github.com/mealie-recipes/mealie.git
synced 2026-02-04 23:13:12 -05:00
feat(frontend): ✨ Fix scheduler, forgot password flow, and minor bug fixes (#725)
* feat(frontend): 💄 add recipe title * fix(frontend): 🐛 fixes #722 side-bar issue * feat(frontend): ✨ Add page titles to all pages * minor cleanup * refactor(backend): ♻️ rewrite scheduler to be more modular and work * feat(frontend): ✨ start password reset functionality * refactor(backend): ♻️ refactor application settings to facilitate dependency injection * refactor(backend): 🔥 remove RECIPE_SETTINGS env variables in favor of group settings * formatting * refactor(backend): ♻️ align naming convention * feat(backend): ✨ password reset * test(backend): ✅ password reset * feat(frontend): ✨ self-service password reset * purge password schedule * update user creation for tests Co-authored-by: Hayden <hay-kot@pm.me>
This commit is contained in:
@@ -5,7 +5,7 @@ from fastapi import BackgroundTasks, Depends, HTTPException, status
|
||||
from pydantic import BaseModel
|
||||
from sqlalchemy.orm.session import Session
|
||||
|
||||
from mealie.core.config import get_app_dirs, get_settings
|
||||
from mealie.core.config import get_app_dirs, get_app_settings
|
||||
from mealie.core.root_logger import get_logger
|
||||
from mealie.db.database import get_database
|
||||
from mealie.db.db_setup import SessionLocal
|
||||
@@ -63,7 +63,7 @@ class BaseHttpService(Generic[T, D], ABC):
|
||||
# Static Globals Dependency Injection
|
||||
self.db = get_database(session)
|
||||
self.app_dirs = get_app_dirs()
|
||||
self.settings = get_settings()
|
||||
self.settings = get_app_settings()
|
||||
|
||||
def _existing_factory(dependency: Type[CLS_DEP]) -> classmethod:
|
||||
def cls_method(cls, item_id: T, deps: CLS_DEP = Depends(dependency)):
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
from mealie.core.config import get_app_dirs, get_settings
|
||||
from mealie.core.config import get_app_dirs, get_app_settings
|
||||
|
||||
|
||||
class BaseService:
|
||||
def __init__(self) -> None:
|
||||
self.app_dirs = get_app_dirs()
|
||||
self.settings = get_settings()
|
||||
self.settings = get_app_settings()
|
||||
|
||||
@@ -9,10 +9,10 @@ from pathvalidate import sanitize_filename
|
||||
from pydantic.main import BaseModel
|
||||
|
||||
from mealie.core import root_logger
|
||||
from mealie.core.config import app_dirs
|
||||
from mealie.core.config import get_app_dirs
|
||||
|
||||
app_dirs = get_app_dirs()
|
||||
from mealie.db.database import get_database
|
||||
from mealie.db.db_setup import create_session
|
||||
from mealie.services.events import create_backup_event
|
||||
|
||||
logger = root_logger.get_logger()
|
||||
|
||||
@@ -141,15 +141,3 @@ def backup_all(
|
||||
db_export.export_items(all_notifications, "notifications")
|
||||
|
||||
return db_export.finish_export()
|
||||
|
||||
|
||||
def auto_backup_job():
|
||||
for backup in app_dirs.BACKUP_DIR.glob("Auto*.zip"):
|
||||
backup.unlink()
|
||||
|
||||
templates = [template for template in app_dirs.TEMPLATE_DIR.iterdir()]
|
||||
session = create_session()
|
||||
backup_all(session=session, tag="Auto", templates=templates)
|
||||
logger.info("Auto Backup Called")
|
||||
create_backup_event("Automated Backup", "Automated backup created", session)
|
||||
session.close()
|
||||
|
||||
@@ -7,7 +7,9 @@ from typing import Callable
|
||||
from pydantic.main import BaseModel
|
||||
from sqlalchemy.orm.session import Session
|
||||
|
||||
from mealie.core.config import app_dirs
|
||||
from mealie.core.config import get_app_dirs
|
||||
|
||||
app_dirs = get_app_dirs()
|
||||
from mealie.db.database import get_database
|
||||
from mealie.schema.admin import (
|
||||
CommentImport,
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from uuid import uuid4
|
||||
|
||||
from fastapi import Depends, HTTPException, status
|
||||
|
||||
from mealie.core.dependencies.grouped import UserDeps
|
||||
from mealie.core.root_logger import get_logger
|
||||
from mealie.core.security import url_safe_token
|
||||
from mealie.schema.group.group_permissions import SetPermissions
|
||||
from mealie.schema.group.group_preferences import UpdateGroupPreferences
|
||||
from mealie.schema.group.invite_token import EmailInitationResponse, EmailInvitation, ReadInviteToken, SaveInviteToken
|
||||
@@ -86,7 +85,7 @@ class GroupSelfService(UserHttpService[int, str]):
|
||||
if not self.user.can_invite:
|
||||
raise HTTPException(status.HTTP_403_FORBIDDEN, detail="User is not allowed to create invite tokens")
|
||||
|
||||
token = SaveInviteToken(uses_left=uses, group_id=self.group_id, token=uuid4().hex)
|
||||
token = SaveInviteToken(uses_left=uses, group_id=self.group_id, token=url_safe_token())
|
||||
return self.db.group_invite_tokens.create(token)
|
||||
|
||||
def get_invite_tokens(self) -> list[ReadInviteToken]:
|
||||
|
||||
@@ -5,10 +5,11 @@ from pathlib import Path
|
||||
from PIL import Image
|
||||
|
||||
from mealie.core import root_logger
|
||||
from mealie.core.config import app_dirs
|
||||
from mealie.core.config import get_app_dirs
|
||||
from mealie.schema.recipe import Recipe
|
||||
|
||||
logger = root_logger.get_logger()
|
||||
app_dirs = get_app_dirs()
|
||||
|
||||
|
||||
@dataclass
|
||||
|
||||
@@ -3,7 +3,9 @@ from typing import Optional
|
||||
|
||||
from sqlalchemy.orm.session import Session
|
||||
|
||||
from mealie.core.config import app_dirs
|
||||
from mealie.core.config import get_app_dirs
|
||||
|
||||
app_dirs = get_app_dirs()
|
||||
from mealie.schema.admin import MigrationImport
|
||||
from mealie.schema.user.user import PrivateUser
|
||||
from mealie.services.migrations import helpers
|
||||
|
||||
@@ -0,0 +1,2 @@
|
||||
from .scheduler_registry import *
|
||||
from .scheduler_service import *
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
|
||||
from mealie.core.config import app_dirs, settings
|
||||
|
||||
app_dirs.DATA_DIR.joinpath("scheduler.db").unlink(missing_ok=True)
|
||||
scheduler = BackgroundScheduler(jobstores={"default": SQLAlchemyJobStore(settings.SCHEDULER_DATABASE)})
|
||||
30
mealie/services/scheduler/scheduled_func.py
Normal file
30
mealie/services/scheduler/scheduled_func.py
Normal file
@@ -0,0 +1,30 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Callable, Tuple
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
@dataclass
class Cron:
    """A simple hour/minute pair, typically parsed from an "HH:MM" string."""

    hours: int
    minutes: int

    @classmethod
    def parse(cls, time_str: str) -> "Cron":
        """Build a Cron from a colon-separated time string, e.g. "14:30"."""
        parts = time_str.split(":")
        return cls(hours=int(parts[0]), minutes=int(parts[1]))
|
||||
|
||||
|
||||
class ScheduledFunc(BaseModel):
    """Description of a cron job to be registered with the SchedulerService.

    BUG FIX: the class was previously wrapped in ``@dataclass`` on top of
    pydantic's ``BaseModel``. Stacking the two bypasses pydantic validation,
    and ``dataclass`` rejects the mutable default ``args: list = []`` with a
    ValueError at import time. ``BaseModel`` alone generates the keyword
    constructor and copies mutable defaults per instance, so the decorator
    is removed.
    """

    id: Tuple[str, int]  # unique job identifier used when (re)scheduling
    name: str  # human-readable job name
    hour: int
    minutes: int
    callback: Callable  # function invoked when the cron fires

    max_instances: int = 1
    replace_existing: bool = True
    args: list = []  # positional args forwarded to the callback
|
||||
@@ -1,124 +0,0 @@
|
||||
import datetime
|
||||
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
|
||||
from mealie.core import root_logger
|
||||
from mealie.db.database import get_database
|
||||
from mealie.db.db_setup import create_session
|
||||
from mealie.db.models.event import Event
|
||||
from mealie.schema.user import GroupInDB
|
||||
from mealie.services.backups.exports import auto_backup_job
|
||||
from mealie.services.scheduler.global_scheduler import scheduler
|
||||
from mealie.services.scheduler.scheduler_utils import Cron, cron_parser
|
||||
from mealie.utils.post_webhooks import post_webhooks
|
||||
|
||||
logger = root_logger.get_logger()
|
||||
|
||||
# TODO Fix Scheduler
|
||||
|
||||
|
||||
@scheduler.scheduled_job(trigger="interval", minutes=1440)
|
||||
def purge_events_database():
|
||||
"""
|
||||
Ran daily. Purges all events after 100
|
||||
"""
|
||||
logger.info("Purging Events in Database")
|
||||
expiration_days = 7
|
||||
limit = datetime.datetime.now() - datetime.timedelta(days=expiration_days)
|
||||
session = create_session()
|
||||
session.query(Event).filter(Event.time_stamp <= limit).delete()
|
||||
session.commit()
|
||||
session.close()
|
||||
logger.info("Events Purges")
|
||||
|
||||
|
||||
@scheduler.scheduled_job(trigger="interval", minutes=30)
|
||||
def update_webhook_schedule():
|
||||
"""
|
||||
A scheduled background job that runs every 30 minutes to
|
||||
poll the database for changes and reschedule the webhook time
|
||||
"""
|
||||
session = create_session()
|
||||
db = get_database(session)
|
||||
all_groups: list[GroupInDB] = db.groups.get_all()
|
||||
|
||||
for group in all_groups:
|
||||
|
||||
time = cron_parser(group.webhook_time)
|
||||
job = JOB_STORE.get(group.name)
|
||||
|
||||
if not job:
|
||||
logger.error(f"No job found for group: {group.name}")
|
||||
logger.info(f"Creating scheduled task for {group.name}")
|
||||
JOB_STORE.update(add_group_to_schedule(scheduler, group))
|
||||
continue
|
||||
|
||||
scheduler.reschedule_job(
|
||||
job.scheduled_task.id,
|
||||
trigger="cron",
|
||||
hour=time.hours,
|
||||
minute=time.minutes,
|
||||
)
|
||||
|
||||
session.close()
|
||||
logger.info(scheduler.print_jobs())
|
||||
|
||||
|
||||
class ScheduledFunction:
    """Wrapper that registers *function* as a cron job at construction time.

    The APScheduler job handle is kept on ``self.scheduled_task`` so callers
    can reschedule or inspect the job later.
    """

    def __init__(
        self,
        scheduler: BackgroundScheduler,
        function,
        cron: Cron,
        name: str,
        args: list = None,
    ) -> None:
        # replace_existing=True lets a re-registration with the same name
        # update the stored job instead of raising.
        self.scheduled_task = scheduler.add_job(
            function,
            trigger="cron",
            name=name,
            hour=cron.hours,
            minute=cron.minutes,
            max_instances=1,
            replace_existing=True,
            args=args,
        )
|
||||
|
||||
|
||||
def add_group_to_schedule(scheduler, group: GroupInDB):
|
||||
cron = cron_parser(group.webhook_time)
|
||||
|
||||
return {
|
||||
group.name: ScheduledFunction(
|
||||
scheduler,
|
||||
post_webhooks,
|
||||
cron=cron,
|
||||
name=group.name,
|
||||
args=[group.id],
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
def init_webhook_schedule(scheduler, job_store: dict):
|
||||
session = create_session()
|
||||
db = get_database(session)
|
||||
all_groups: list[GroupInDB] = db.groups.get_all()
|
||||
|
||||
for group in all_groups:
|
||||
job_store.update(add_group_to_schedule(scheduler, group))
|
||||
|
||||
session.close()
|
||||
|
||||
return job_store
|
||||
|
||||
|
||||
logger.info("----INIT SCHEDULE OBJECT-----")
|
||||
|
||||
JOB_STORE = {
|
||||
"backup_job": ScheduledFunction(scheduler, auto_backup_job, Cron(hours=00, minutes=00), "backups"),
|
||||
}
|
||||
|
||||
JOB_STORE = init_webhook_schedule(scheduler=scheduler, job_store=JOB_STORE)
|
||||
|
||||
logger.info(scheduler.print_jobs())
|
||||
scheduler.start()
|
||||
43
mealie/services/scheduler/scheduler_registry.py
Normal file
43
mealie/services/scheduler/scheduler_registry.py
Normal file
@@ -0,0 +1,43 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Callable
|
||||
|
||||
from mealie.core import root_logger
|
||||
|
||||
logger = root_logger.get_logger()
|
||||
|
||||
|
||||
class SchedulerRegistry:
    """
    A container class for registering and removing callbacks for the scheduler.

    Callbacks live in class-level lists, so registration is process-global no
    matter which reference is used.
    """

    _daily: list[Callable] = []
    _hourly: list[Callable] = []
    _minutely: list[Callable] = []

    @staticmethod
    def _register(name: str, callbacks: list[Callable], new_callbacks: tuple[Callable, ...]):
        # Shared helper: log and append each new callback onto the target list.
        for cb in new_callbacks:
            logger.info(f"Registering {name} callback: {cb.__name__}")
            callbacks.append(cb)

    @staticmethod
    def register_daily(*callbacks: Callable):
        SchedulerRegistry._register("daily", SchedulerRegistry._daily, callbacks)

    @staticmethod
    def remove_daily(callback: Callable):
        logger.info(f"Removing daily callback: {callback.__name__}")
        SchedulerRegistry._daily.remove(callback)

    @staticmethod
    def register_hourly(*callbacks: Callable):
        # BUG FIX: the log label previously said "daily" for hourly callbacks.
        SchedulerRegistry._register("hourly", SchedulerRegistry._hourly, callbacks)

    @staticmethod
    def remove_hourly(callback: Callable):
        logger.info(f"Removing hourly callback: {callback.__name__}")
        SchedulerRegistry._hourly.remove(callback)

    @staticmethod
    def register_minutely(*callbacks: Callable):
        SchedulerRegistry._register("minutely", SchedulerRegistry._minutely, callbacks)

    @staticmethod
    def remove_minutely(callback: Callable):
        logger.info(f"Removing minutely callback: {callback.__name__}")
        SchedulerRegistry._minutely.remove(callback)
|
||||
104
mealie/services/scheduler/scheduler_service.py
Normal file
104
mealie/services/scheduler/scheduler_service.py
Normal file
@@ -0,0 +1,104 @@
|
||||
from pathlib import Path
|
||||
|
||||
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
|
||||
from mealie.core import root_logger
|
||||
from mealie.core.config import get_app_dirs
|
||||
|
||||
from .scheduled_func import ScheduledFunc
|
||||
from .scheduler_registry import SchedulerRegistry
|
||||
|
||||
logger = root_logger.get_logger()
|
||||
|
||||
CWD = Path(__file__).parent
|
||||
|
||||
app_dirs = get_app_dirs()
|
||||
TEMP_DATA = app_dirs.DATA_DIR / ".temp"
|
||||
SCHEDULER_DB = TEMP_DATA / "scheduler.db"
|
||||
SCHEDULER_DATABASE = f"sqlite:///{SCHEDULER_DB}"
|
||||
|
||||
MINUTES_DAY = 1440
|
||||
MINUTES_15 = 15
|
||||
MINUTES_HOUR = 60
|
||||
|
||||
|
||||
class SchedulerService:
    """
    SchedulerService is a wrapper class around the APScheduler library. It is responsible for interacting with the scheduler
    and scheduling events. This includes the interval events that are registered in the SchedulerRegistry as well as cron events
    that are used for sending webhooks. In most cases, unless the schedule is dynamic, events should be registered with the
    SchedulerRegistry. See app.py for examples.
    """

    # Process-wide singleton scheduler; populated by start().
    _scheduler: BackgroundScheduler = None
    # Not Sure if this is still needed?
    # _job_store: dict[str, ScheduledFunc] = {}

    def start():
        """Create, populate, and start the global BackgroundScheduler.

        NOTE(review): defined without self/cls — only callable as
        ``SchedulerService.start()``, never on an instance.
        """
        # Preclean: drop any scheduler DB left over from a previous run so
        # stale persisted jobs are not resurrected.
        SCHEDULER_DB.unlink(missing_ok=True)

        # Scaffold: make sure the temp directory backing the job store exists.
        TEMP_DATA.mkdir(parents=True, exist_ok=True)

        # Register Interval Jobs and Start Scheduler. The three interval jobs
        # fan out to the callbacks held in SchedulerRegistry.
        SchedulerService._scheduler = BackgroundScheduler(jobstores={"default": SQLAlchemyJobStore(SCHEDULER_DATABASE)})
        SchedulerService._scheduler.add_job(run_daily, "interval", minutes=MINUTES_DAY, id="Daily Interval Jobs")
        SchedulerService._scheduler.add_job(run_hourly, "interval", minutes=MINUTES_HOUR, id="Hourly Interval Jobs")
        SchedulerService._scheduler.add_job(run_minutely, "interval", minutes=MINUTES_15, id="Regular Interval Jobs")
        SchedulerService._scheduler.start()

    @classmethod
    @property
    def scheduler(cls) -> BackgroundScheduler:
        # NOTE(review): chaining classmethod+property is deprecated in
        # Python 3.11 and removed in 3.13 — confirm the target runtime.
        return SchedulerService._scheduler

    def add_cron_job(job_func: ScheduledFunc):
        """Add a new cron-triggered job described by *job_func*.

        NOTE(review): no self/cls — call as ``SchedulerService.add_cron_job(...)``.
        """
        SchedulerService.scheduler.add_job(
            job_func.callback,
            trigger="cron",
            name=job_func.id,  # NOTE(review): id is passed as the display *name*; add_job's own id kwarg is not set — verify intended
            hour=job_func.hour,
            minute=job_func.minutes,
            max_instances=job_func.max_instances,
            replace_existing=job_func.replace_existing,
            args=job_func.args,
        )

        # SchedulerService._job_store[job_func.id] = job_func

    def update_cron_job(job_func: ScheduledFunc):
        """Reschedule the existing job whose id matches ``job_func.id``."""
        SchedulerService.scheduler.reschedule_job(
            job_func.id,
            trigger="cron",
            hour=job_func.hour,
            minute=job_func.minutes,
        )

        # SchedulerService._job_store[job_func.id] = job_func
|
||||
|
||||
|
||||
def _scheduled_task_wrapper(callable):
|
||||
try:
|
||||
callable()
|
||||
except Exception as e:
|
||||
logger.error(f"Error in scheduled task func='{callable.__name__}': exception='{e}'")
|
||||
|
||||
|
||||
def run_daily():
    """Execute every callback registered via SchedulerRegistry.register_daily."""
    logger.info("Running daily callbacks")
    for func in SchedulerRegistry._daily:
        # The wrapper logs and swallows exceptions so one failing callback
        # does not block the rest.
        _scheduled_task_wrapper(func)
|
||||
|
||||
|
||||
def run_hourly():
    """Execute every callback registered via SchedulerRegistry.register_hourly."""
    logger.info("Running hourly callbacks")
    for func in SchedulerRegistry._hourly:
        # The wrapper logs and swallows exceptions so one failing callback
        # does not block the rest.
        _scheduled_task_wrapper(func)
|
||||
|
||||
|
||||
def run_minutely():
    """Execute every callback registered via SchedulerRegistry.register_minutely."""
    logger.info("Running minutely callbacks")
    for func in SchedulerRegistry._minutely:
        # The wrapper logs and swallows exceptions so one failing callback
        # does not block the rest.
        _scheduled_task_wrapper(func)
|
||||
@@ -1,8 +0,0 @@
|
||||
import collections

# Lightweight (hours, minutes) pair returned by cron_parser.
Cron = collections.namedtuple("Cron", "hours minutes")


def cron_parser(time_str: str) -> Cron:
    """Parse an "HH:MM" string into a Cron(hours, minutes) named tuple."""
    parts = time_str.split(":")
    hours, minutes = int(parts[0]), int(parts[1])
    return Cron(hours=hours, minutes=minutes)
|
||||
14
mealie/services/scheduler/tasks/__init__.py
Normal file
14
mealie/services/scheduler/tasks/__init__.py
Normal file
@@ -0,0 +1,14 @@
|
||||
from .auto_backup import *
|
||||
from .purge_events import *
|
||||
from .purge_password_reset import *
|
||||
from .purge_registration import *
|
||||
from .webhooks import *
|
||||
|
||||
"""
|
||||
Tasks Package
|
||||
|
||||
Common recurring tasks for the server to perform. Tasks here are registered to the SchedulerRegistry class
|
||||
in the app.py file as a post-startup task. This is done to ensure that the tasks are run after the server has
|
||||
started up and the Scheduler object is only available to a single worker.
|
||||
|
||||
"""
|
||||
22
mealie/services/scheduler/tasks/auto_backup.py
Normal file
22
mealie/services/scheduler/tasks/auto_backup.py
Normal file
@@ -0,0 +1,22 @@
|
||||
from mealie.core import root_logger
|
||||
from mealie.core.config import get_app_dirs
|
||||
|
||||
app_dirs = get_app_dirs()
|
||||
from mealie.db.db_setup import create_session
|
||||
from mealie.services.backups.exports import backup_all
|
||||
from mealie.services.events import create_backup_event
|
||||
|
||||
logger = root_logger.get_logger()
|
||||
|
||||
|
||||
def auto_backup():
    """Generate an automated ("Auto"-tagged) backup, replacing earlier ones.

    Deletes previous auto-generated archives, exports a full backup using all
    templates found in the template directory, records a backup event, and
    closes its own session.
    """
    # Remove older automated backups so only the newest archive is kept.
    for backup in app_dirs.BACKUP_DIR.glob("Auto*.zip"):
        backup.unlink()

    templates = [template for template in app_dirs.TEMPLATE_DIR.iterdir()]
    session = create_session()
    backup_all(session=session, tag="Auto", templates=templates)
    logger.info("generating automated backup")
    create_backup_event("Automated Backup", "Automated backup created", session)
    session.close()
    logger.info("automated backup generated")
|
||||
19
mealie/services/scheduler/tasks/purge_events.py
Normal file
19
mealie/services/scheduler/tasks/purge_events.py
Normal file
@@ -0,0 +1,19 @@
|
||||
import datetime
|
||||
|
||||
from mealie.core import root_logger
|
||||
from mealie.db.db_setup import create_session
|
||||
from mealie.db.models.event import Event
|
||||
|
||||
logger = root_logger.get_logger()
|
||||
|
||||
|
||||
def purge_events_database():
    """Delete Event rows older than ``expiration_days`` (currently 7 days).

    Runs as a scheduled task; opens, commits, and closes its own session.
    (The old docstring claimed it "purges all events after 100", which did
    not match the age-based delete below.)
    """
    logger.info("purging events in database")
    expiration_days = 7
    limit = datetime.datetime.now() - datetime.timedelta(days=expiration_days)
    session = create_session()
    session.query(Event).filter(Event.time_stamp <= limit).delete()
    session.commit()
    session.close()
    # BUG FIX: log message previously read "events purges"
    logger.info("events purged")
|
||||
20
mealie/services/scheduler/tasks/purge_password_reset.py
Normal file
20
mealie/services/scheduler/tasks/purge_password_reset.py
Normal file
@@ -0,0 +1,20 @@
|
||||
import datetime
|
||||
|
||||
from mealie.core import root_logger
|
||||
from mealie.db.db_setup import create_session
|
||||
from mealie.db.models.users.password_reset import PasswordResetModel
|
||||
|
||||
logger = root_logger.get_logger()
|
||||
|
||||
MAX_DAYS_OLD = 2
|
||||
|
||||
|
||||
def purge_password_reset_tokens():
    """Delete password-reset tokens older than ``MAX_DAYS_OLD`` days.

    Runs as a scheduled task; opens, commits, and closes its own session.
    """
    logger.info("purging password reset tokens")
    limit = datetime.datetime.now() - datetime.timedelta(days=MAX_DAYS_OLD)
    session = create_session()
    session.query(PasswordResetModel).filter(PasswordResetModel.created_at <= limit).delete()
    session.commit()
    session.close()
    # BUG FIX: log message previously read "password reset tokens purges"
    logger.info("password reset tokens purged")
|
||||
20
mealie/services/scheduler/tasks/purge_registration.py
Normal file
20
mealie/services/scheduler/tasks/purge_registration.py
Normal file
@@ -0,0 +1,20 @@
|
||||
import datetime
|
||||
|
||||
from mealie.core import root_logger
|
||||
from mealie.db.db_setup import create_session
|
||||
from mealie.db.models.group import GroupInviteToken
|
||||
|
||||
logger = root_logger.get_logger()
|
||||
|
||||
MAX_DAYS_OLD = 4
|
||||
|
||||
|
||||
def purge_group_registration():
    """Delete group invite tokens older than ``MAX_DAYS_OLD`` days.

    Runs as a scheduled task; opens, commits, and closes its own session.
    (Corrects the old docstring, which said "events" — this purges
    GroupInviteToken rows.)
    """
    logger.info("purging expired registration tokens")
    limit = datetime.datetime.now() - datetime.timedelta(days=MAX_DAYS_OLD)
    session = create_session()
    session.query(GroupInviteToken).filter(GroupInviteToken.created_at <= limit).delete()
    session.commit()
    session.close()
    logger.info("registration token purged")
|
||||
58
mealie/services/scheduler/tasks/webhooks.py
Normal file
58
mealie/services/scheduler/tasks/webhooks.py
Normal file
@@ -0,0 +1,58 @@
|
||||
import json
|
||||
|
||||
import requests
|
||||
from sqlalchemy.orm.session import Session
|
||||
|
||||
from mealie.core import root_logger
|
||||
from mealie.db.database import get_database
|
||||
from mealie.db.db_setup import create_session
|
||||
from mealie.schema.group.webhook import ReadWebhook
|
||||
|
||||
from ..scheduled_func import Cron, ScheduledFunc
|
||||
from ..scheduler_service import SchedulerService
|
||||
|
||||
logger = root_logger.get_logger()
|
||||
|
||||
|
||||
def post_webhooks(webhook_id: int, session: Session = None):
    """POST today's meal-plan recipes to the webhook with the given id.

    Skips disabled webhooks and days with no scheduled recipe. A session may
    be supplied by the caller; otherwise one is created. The session is
    always closed on exit.
    """
    session = session or create_session()
    try:
        db = get_database(session)
        webhook: ReadWebhook = db.webhooks.get_one(webhook_id)

        if not webhook.enabled:
            logger.info(f"Skipping webhook {webhook_id}. reasons: is disabled")
            return

        todays_recipe = db.meals.get_today(webhook.group_id)

        if not todays_recipe:
            return

        # BUG FIX: json.loads() was called on a *list* of JSON strings, which
        # always raises TypeError. Decode each recipe's JSON individually so
        # the request body is a JSON array of recipe objects.
        payload = [json.loads(recipe.json(by_alias=True)) for recipe in todays_recipe]
        response = requests.post(webhook.url, json=payload)

        if response.status_code != 200:
            logger.error(f"Error posting webhook to {webhook.url} ({response.status_code})")
    finally:
        # BUG FIX: the session was leaked on the early-return paths (disabled
        # webhook / no recipe today); always close it.
        session.close()
|
||||
|
||||
|
||||
def update_group_webhooks():
    """Register a cron job with the SchedulerService for every stored webhook."""
    session = create_session()
    try:
        db = get_database(session)

        webhooks: list[ReadWebhook] = db.webhooks.get_all()

        for webhook in webhooks:
            cron = Cron.parse(webhook.time)

            job_func = ScheduledFunc(
                id=webhook.id,
                name=f"Group {webhook.group_id} webhook",
                callback=post_webhooks,
                hour=cron.hours,
                # BUG FIX: ScheduledFunc's field is `minutes`; the previous
                # keyword `minute=` does not match and errors on construction.
                minutes=cron.minutes,
                # BUG FIX: `(webhook.id)` is just an int, not a tuple — use a
                # list so the callback receives the id as a positional arg.
                args=[webhook.id],
            )

            SchedulerService.add_cron_job(job_func)
    finally:
        # BUG FIX: the session was never closed; release it on all paths.
        session.close()
|
||||
@@ -6,7 +6,7 @@ from typing import Optional
|
||||
|
||||
from pydantic import BaseModel, validator
|
||||
|
||||
from mealie.core.config import settings
|
||||
from mealie.core.config import get_app_settings
|
||||
from mealie.schema.recipe import RecipeIngredient
|
||||
from mealie.schema.recipe.recipe_ingredient import CreateIngredientFood, CreateIngredientUnit
|
||||
|
||||
@@ -15,6 +15,8 @@ from .pre_processor import pre_process_string
|
||||
|
||||
CWD = Path(__file__).parent
|
||||
MODEL_PATH = CWD / "model.crfmodel"
|
||||
settings = get_app_settings()
|
||||
|
||||
|
||||
INGREDIENT_TEXT = [
|
||||
"2 tablespoons honey",
|
||||
|
||||
@@ -4,7 +4,9 @@ import extruct
|
||||
from slugify import slugify
|
||||
from w3lib.html import get_base_url
|
||||
|
||||
from mealie.core.config import app_dirs
|
||||
from mealie.core.config import get_app_dirs
|
||||
|
||||
app_dirs = get_app_dirs()
|
||||
|
||||
LAST_JSON = app_dirs.DEBUG_DIR.joinpath("last_recipe.json")
|
||||
|
||||
|
||||
@@ -8,7 +8,9 @@ from fastapi import HTTPException, status
|
||||
from recipe_scrapers import NoSchemaFoundInWildMode, SchemaScraperFactory, WebsiteNotImplementedError, scrape_me
|
||||
from slugify import slugify
|
||||
|
||||
from mealie.core.config import app_dirs
|
||||
from mealie.core.config import get_app_dirs
|
||||
|
||||
app_dirs = get_app_dirs()
|
||||
from mealie.core.root_logger import get_logger
|
||||
from mealie.schema.recipe import Recipe, RecipeStep
|
||||
from mealie.services.image.image import scrape_image
|
||||
|
||||
66
mealie/services/user_services/password_reset_service.py
Normal file
66
mealie/services/user_services/password_reset_service.py
Normal file
@@ -0,0 +1,66 @@
|
||||
from fastapi import HTTPException, status
|
||||
from sqlalchemy.orm.session import Session
|
||||
|
||||
from mealie.core.root_logger import get_logger
|
||||
from mealie.core.security import hash_password, url_safe_token
|
||||
from mealie.db.database import get_database
|
||||
from mealie.schema.user.user_passwords import SavePasswordResetToken
|
||||
from mealie.services._base_service import BaseService
|
||||
from mealie.services.email import EmailService
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
class PasswordResetService(BaseService):
    """Self-service password reset: token generation, email delivery, reset."""

    def __init__(self, session: Session) -> None:
        self.db = get_database(session)
        super().__init__()

    def generate_reset_token(self, email: str) -> SavePasswordResetToken:
        """Create and persist a reset token for the user with *email*.

        Returns None when no user matches — deliberately silent so the API
        does not reveal whether an email address is registered.
        """
        user = self.db.users.get_one(email, "email")

        if user is None:
            logger.error(f"failed to create password reset for {email=}: user doesn't exists")
            # Do not raise exception here as we don't want to confirm to the client that the Email doens't exists
            return None

        # Create Reset Token
        token = url_safe_token()
        save_token = SavePasswordResetToken(user_id=user.id, token=token)
        return self.db.tokens_pw_reset.create(save_token)

    def send_reset_email(self, email: str):
        """Generate a token for *email* and send the reset link, if the user exists."""
        token_entry = self.generate_reset_token(email)

        # BUG FIX: generate_reset_token returns None for unknown emails; the
        # previous code then crashed on `token_entry.token`, producing a 500
        # that leaked whether the account exists. Skip sending silently.
        if token_entry is None:
            return

        # Send Email (fixes the `email_servive` local-variable typo)
        email_service = EmailService()
        reset_url = f"{self.settings.BASE_URL}/reset-password?token={token_entry.token}"

        try:
            email_service.send_forgot_password(email, reset_url)
        except Exception as e:
            logger.error(f"failed to send reset email: {e}")
            raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR, "Failed to send reset email") from e

    def reset_password(self, token: str, new_password: str):
        """Validate *token*, set the user's new password, and consume the token.

        Raises:
            HTTPException(400): unknown token, or the password update did not stick.
        """
        # Validate Token
        token_entry = self.db.tokens_pw_reset.get_one(token, "token")

        if token_entry is None:
            logger.error("failed to reset password: invalid token")
            raise HTTPException(status.HTTP_400_BAD_REQUEST, "Invalid token")

        user = self.db.users.get_one(token_entry.user_id)

        # Update Password, then re-check the stored hash to confirm the write.
        password_hash = hash_password(new_password)
        new_user = self.db.users.update_password(user.id, password_hash)

        if new_user.password != password_hash:
            logger.error("failed to reset password: invalid password")
            raise HTTPException(status.HTTP_400_BAD_REQUEST, "Invalid password")

        # Tokens are single-use: delete once the password has been changed.
        self.db.tokens_pw_reset.delete(token_entry.token)
|
||||
@@ -30,7 +30,7 @@ class RegistrationService(PublicHttpService[int, str]):
|
||||
group = self._register_new_group()
|
||||
|
||||
elif registration.group_token and registration.group_token != "":
|
||||
token_entry = self.db.group_invite_tokens.get(registration.group_token)
|
||||
token_entry = self.db.group_invite_tokens.get_one(registration.group_token)
|
||||
if not token_entry:
|
||||
raise HTTPException(status.HTTP_400_BAD_REQUEST, {"message": "Invalid group token"})
|
||||
group = self.db.groups.get(token_entry.group_id)
|
||||
|
||||
Reference in New Issue
Block a user