feat: Add Households to Mealie (#3970)

Michael Genson
2024-08-22 10:14:32 -05:00
committed by GitHub
parent 0c29cef17d
commit eb170cc7e5
315 changed files with 6975 additions and 3577 deletions

View File

@@ -1,6 +1,7 @@
from datetime import datetime, time, timedelta, timezone
from pydantic import UUID4
from sqlalchemy.orm import Session
from mealie.db.db_setup import session_context
from mealie.repos.all_repositories import get_repositories
@@ -18,99 +19,109 @@ from mealie.services.event_bus_service.event_types import (
)
def create_mealplan_timeline_events(group_id: UUID4 | None = None):
def _create_mealplan_timeline_events_for_household(
event_time: datetime, session: Session, group_id: UUID4, household_id: UUID4
) -> None:
repos = get_repositories(session, group_id=group_id, household_id=household_id)
event_bus_service = EventBusService(session=session)
timeline_events_to_create: list[RecipeTimelineEventCreate] = []
recipes_to_update: dict[UUID4, RecipeSummary] = {}
recipe_id_to_slug_map: dict[UUID4, str] = {}
mealplans = repos.meals.get_today()
for mealplan in mealplans:
if not (mealplan.recipe and mealplan.user_id):
continue
user = repos.users.get_one(mealplan.user_id)
if not user:
continue
# TODO: make this translatable
if mealplan.entry_type == PlanEntryType.side:
event_subject = f"{user.full_name} made this as a side"
else:
event_subject = f"{user.full_name} made this for {mealplan.entry_type.value}"
query_start_time = datetime.combine(datetime.now(timezone.utc).date(), time.min)
query_end_time = query_start_time + timedelta(days=1)
query = PaginationQuery(
query_filter=(
f'recipe_id = "{mealplan.recipe_id}" '
f'AND timestamp >= "{query_start_time.isoformat()}" '
f'AND timestamp < "{query_end_time.isoformat()}" '
f'AND subject = "{event_subject}"'
)
)
# if this event already exists, don't create it again
events = repos.recipe_timeline_events.page_all(pagination=query)
if events.items:
continue
# bump up the last made date
last_made = mealplan.recipe.last_made
if (not last_made or last_made.date() < event_time.date()) and mealplan.recipe_id not in recipes_to_update:
recipes_to_update[mealplan.recipe_id] = mealplan.recipe
timeline_events_to_create.append(
RecipeTimelineEventCreate(
user_id=user.id,
subject=event_subject,
event_type=TimelineEventType.info,
timestamp=event_time,
recipe_id=mealplan.recipe_id,
)
)
recipe_id_to_slug_map[mealplan.recipe_id] = mealplan.recipe.slug
if not timeline_events_to_create:
return
# TODO: use bulk operations
for event in timeline_events_to_create:
new_event = repos.recipe_timeline_events.create(event)
event_bus_service.dispatch(
integration_id=DEFAULT_INTEGRATION_ID,
group_id=group_id,
household_id=household_id,
event_type=EventTypes.recipe_updated,
document_data=EventRecipeTimelineEventData(
operation=EventOperation.create,
recipe_slug=recipe_id_to_slug_map[new_event.recipe_id],
recipe_timeline_event_id=new_event.id,
),
)
for recipe in recipes_to_update.values():
repos.recipes.patch(recipe.slug, {"last_made": event_time})
event_bus_service.dispatch(
integration_id=DEFAULT_INTEGRATION_ID,
group_id=group_id,
household_id=household_id,
event_type=EventTypes.recipe_updated,
document_data=EventRecipeData(operation=EventOperation.update, recipe_slug=recipe.slug),
)
def _create_mealplan_timeline_events_for_group(event_time: datetime, session: Session, group_id: UUID4) -> None:
repos = get_repositories(session, group_id=group_id)
households_data = repos.households.page_all(PaginationQuery(page=1, per_page=-1))
household_ids = [household.id for household in households_data.items]
for household_id in household_ids:
_create_mealplan_timeline_events_for_household(event_time, session, group_id, household_id)
def create_mealplan_timeline_events() -> None:
event_time = datetime.now(timezone.utc)
with session_context() as session:
repos = get_repositories(session)
if group_id is None:
# if not specified, we check all groups
groups_data = repos.groups.page_all(PaginationQuery(page=1, per_page=-1))
group_ids = [group.id for group in groups_data.items]
else:
group_ids = [group_id]
groups_data = repos.groups.page_all(PaginationQuery(page=1, per_page=-1))
group_ids = [group.id for group in groups_data.items]
for group_id in group_ids:
event_bus_service = EventBusService(session=session, group_id=group_id)
timeline_events_to_create: list[RecipeTimelineEventCreate] = []
recipes_to_update: dict[UUID4, RecipeSummary] = {}
recipe_id_to_slug_map: dict[UUID4, str] = {}
mealplans = repos.meals.get_today(group_id)
for mealplan in mealplans:
if not (mealplan.recipe and mealplan.user_id):
continue
user = repos.users.get_one(mealplan.user_id)
if not user:
continue
# TODO: make this translatable
if mealplan.entry_type == PlanEntryType.side:
event_subject = f"{user.full_name} made this as a side"
else:
event_subject = f"{user.full_name} made this for {mealplan.entry_type.value}"
query_start_time = datetime.combine(datetime.now(timezone.utc).date(), time.min)
query_end_time = query_start_time + timedelta(days=1)
query = PaginationQuery(
query_filter=(
f'recipe_id = "{mealplan.recipe_id}" '
f'AND timestamp >= "{query_start_time.isoformat()}" '
f'AND timestamp < "{query_end_time.isoformat()}" '
f'AND subject = "{event_subject}"'
)
)
# if this event already exists, don't create it again
events = repos.recipe_timeline_events.page_all(pagination=query)
if events.items:
continue
# bump up the last made date
last_made = mealplan.recipe.last_made
if (
not last_made or last_made.date() < event_time.date()
) and mealplan.recipe_id not in recipes_to_update:
recipes_to_update[mealplan.recipe_id] = mealplan.recipe
timeline_events_to_create.append(
RecipeTimelineEventCreate(
user_id=user.id,
subject=event_subject,
event_type=TimelineEventType.info,
timestamp=event_time,
recipe_id=mealplan.recipe_id,
)
)
recipe_id_to_slug_map[mealplan.recipe_id] = mealplan.recipe.slug
if not timeline_events_to_create:
return
# TODO: use bulk operations
for event in timeline_events_to_create:
new_event = repos.recipe_timeline_events.create(event)
event_bus_service.dispatch(
integration_id=DEFAULT_INTEGRATION_ID,
group_id=group_id,
event_type=EventTypes.recipe_updated,
document_data=EventRecipeTimelineEventData(
operation=EventOperation.create,
recipe_slug=recipe_id_to_slug_map[new_event.recipe_id],
recipe_timeline_event_id=new_event.id,
),
)
for recipe in recipes_to_update.values():
repos.recipes.patch(recipe.slug, {"last_made": event_time})
event_bus_service.dispatch(
integration_id=DEFAULT_INTEGRATION_ID,
group_id=group_id,
event_type=EventTypes.recipe_updated,
document_data=EventRecipeData(operation=EventOperation.update, recipe_slug=recipe.slug),
)
_create_mealplan_timeline_events_for_group(event_time, session, group_id)
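Taken together, this hunk replaces the old per-group loop with a two-level fan-out: the scheduled task lists every group, a helper lists every household in that group, and the timeline logic runs against household-scoped repositories. A minimal sketch of that pattern, reusing only the calls shown above (the function name and the `...` body are placeholders, not part of the commit):

from sqlalchemy.orm import Session

from mealie.repos.all_repositories import get_repositories
from mealie.schema.response.pagination import PaginationQuery


def _fan_out_per_household(session: Session) -> None:
    # unscoped repositories can enumerate all groups
    repos = get_repositories(session)
    groups = repos.groups.page_all(PaginationQuery(page=1, per_page=-1)).items
    for group in groups:
        # group-scoped repositories can enumerate that group's households
        group_repos = get_repositories(session, group_id=group.id)
        households = group_repos.households.page_all(PaginationQuery(page=1, per_page=-1)).items
        for household in households:
            # household-scoped repositories only see this household's mealplans,
            # timeline events, etc., so the task body needs no extra filtering
            household_repos = get_repositories(session, group_id=group.id, household_id=household.id)
            ...  # per-household work goes here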

View File

@@ -4,21 +4,28 @@ from pydantic import UUID4
from mealie.db.db_setup import session_context
from mealie.repos.all_repositories import get_repositories
from mealie.routes.groups.controller_shopping_lists import publish_list_item_events
from mealie.routes.households.controller_shopping_lists import publish_list_item_events
from mealie.schema.response.pagination import OrderDirection, PaginationQuery
from mealie.schema.user.user import DEFAULT_INTEGRATION_ID
from mealie.services.event_bus_service.event_bus_service import EventBusService
from mealie.services.event_bus_service.event_types import EventDocumentDataBase, EventTypes
from mealie.services.group_services.shopping_lists import ShoppingListService
from mealie.services.household_services.shopping_lists import ShoppingListService
MAX_CHECKED_ITEMS = 100
def _create_publish_event(event_bus_service: EventBusService, group_id: UUID4):
def publish_event(event_type: EventTypes, document_data: EventDocumentDataBase, message: str = ""):
def _create_publish_event(event_bus_service: EventBusService):
def publish_event(
event_type: EventTypes,
document_data: EventDocumentDataBase,
group_id: UUID4,
household_id: UUID4 | None,
message: str = "",
):
event_bus_service.dispatch(
integration_id=DEFAULT_INTEGRATION_ID,
group_id=group_id,
household_id=household_id,
event_type=event_type,
document_data=document_data,
message=message,
@@ -32,7 +39,7 @@ def _trim_list_items(shopping_list_service: ShoppingListService, shopping_list_i
page=1,
per_page=-1,
query_filter=f'shopping_list_id="{shopping_list_id}" AND checked=true',
order_by="update_at",
order_by="updated_at",
order_direction=OrderDirection.desc,
)
query = shopping_list_service.list_items.page_all(pagination)
@@ -44,28 +51,25 @@ def _trim_list_items(shopping_list_service: ShoppingListService, shopping_list_i
publish_list_item_events(event_publisher, items_response)
def delete_old_checked_list_items(group_id: UUID4 | None = None):
def delete_old_checked_list_items():
with session_context() as session:
repos = get_repositories(session)
if group_id is None:
# if not specified, we check all groups
groups = repos.groups.page_all(PaginationQuery(page=1, per_page=-1)).items
else:
group = repos.groups.get_one(group_id)
if not group:
raise Exception(f'Group not found: "{group_id}"')
groups = [group]
groups = repos.groups.page_all(PaginationQuery(page=1, per_page=-1)).items
for group in groups:
event_bus_service = EventBusService(session=session, group_id=group.id)
# user is passed as None since we don't use it here
shopping_list_service = ShoppingListService(repos, group, None) # type: ignore
shopping_list_data = repos.group_shopping_lists.by_group(group.id).page_all(
PaginationQuery(page=1, per_page=-1)
)
for shopping_list in shopping_list_data.items:
_trim_list_items(
shopping_list_service, shopping_list.id, _create_publish_event(event_bus_service, group.id)
)
group_repos = get_repositories(session, group_id=group.id)
households = group_repos.households.page_all(PaginationQuery(page=1, per_page=-1)).items
event_bus_service = EventBusService(session=session)
event_publisher = _create_publish_event(event_bus_service)
for household in households:
household_repos = get_repositories(session, group_id=group.id, household_id=household.id)
shopping_list_service = ShoppingListService(household_repos)
shopping_list_data = household_repos.group_shopping_lists.page_all(PaginationQuery(page=1, per_page=-1))
for shopping_list in shopping_list_data.items:
_trim_list_items(
shopping_list_service,
shopping_list.id,
event_publisher,
)
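This file applies the same household fan-out to the checked-item cleanup task and also reworks _create_publish_event: the closure no longer captures a group_id, so the group and household scope travel with each publish call instead. A short sketch of how the returned publisher is meant to be invoked; the event type, message, and the document_data/group/household names are illustrative stand-ins for values available inside the task above:

event_bus_service = EventBusService(session=session)
publish = _create_publish_event(event_bus_service)

publish(
    event_type=EventTypes.shopping_list_updated,  # illustrative event type
    document_data=document_data,  # any EventDocumentDataBase instance built by the caller
    group_id=group.id,
    household_id=household.id,  # may be None for group-wide events
    message="trimmed checked shopping list items",
)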

View File

@@ -4,7 +4,7 @@ from pydantic import UUID4
from mealie.db.db_setup import session_context
from mealie.repos.all_repositories import get_repositories
from mealie.schema.group.webhook import ReadWebhook
from mealie.schema.household.webhook import ReadWebhook
from mealie.schema.response.pagination import PaginationQuery
from mealie.services.event_bus_service.event_bus_listeners import WebhookEventListener
from mealie.services.event_bus_service.event_bus_service import EventBusService
@@ -21,7 +21,9 @@ from mealie.services.event_bus_service.event_types import (
last_ran = datetime.now(timezone.utc)
def post_group_webhooks(start_dt: datetime | None = None, group_id: UUID4 | None = None) -> None:
def post_group_webhooks(
start_dt: datetime | None = None, group_id: UUID4 | None = None, household_id: UUID4 | None = None
) -> None:
"""Post webhook events to specified group, or all groups"""
global last_ran
@@ -58,13 +60,23 @@ def post_group_webhooks(start_dt: datetime | None = None, group_id: UUID4 | None
)
for group_id in group_ids:
event_bus = EventBusService(group_id=group_id)
event_bus.dispatch(
integration_id=INTERNAL_INTEGRATION_ID,
group_id=group_id,
event_type=event_type,
document_data=event_document_data,
)
if household_id is None:
with session_context() as session:
household_repos = get_repositories(session, group_id=group_id)
households_data = household_repos.households.page_all(PaginationQuery(page=1, per_page=-1))
household_ids = [household.id for household in households_data.items]
else:
household_ids = [household_id]
for household_id in household_ids:
event_bus = EventBusService()
event_bus.dispatch(
integration_id=INTERNAL_INTEGRATION_ID,
group_id=group_id,
household_id=household_id,
event_type=event_type,
document_data=event_document_data,
)
def post_single_webhook(webhook: ReadWebhook, message: str = "") -> None:
@@ -84,5 +96,5 @@ def post_single_webhook(webhook: ReadWebhook, message: str = "") -> None:
document_data=event_document_data,
)
listener = WebhookEventListener(webhook.group_id)
listener = WebhookEventListener(webhook.group_id, webhook.household_id)
listener.publish_to_subscribers(event, [webhook])
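For webhooks, post_group_webhooks keeps its group fan-out and gains an optional household_id: when it is omitted the task resolves every household in each group, otherwise it targets just the one, and every dispatch now carries both identifiers. Roughly how the two call shapes look (the UUID variables are placeholders):

from datetime import datetime, timedelta, timezone

# scheduled path: fan out to all groups and all of their households
post_group_webhooks()

# targeted path: consider webhooks scheduled in the last five minutes for one household
post_group_webhooks(
    start_dt=datetime.now(timezone.utc) - timedelta(minutes=5),
    group_id=some_group_id,  # placeholder UUID4
    household_id=some_household_id,  # placeholder UUID4
)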

View File

@@ -4,7 +4,7 @@ from sqlalchemy import delete
from mealie.core import root_logger
from mealie.db.db_setup import session_context
from mealie.db.models.group import GroupInviteToken
from mealie.db.models.household import GroupInviteToken
logger = root_logger.get_logger()
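The last hunk shown here only changes where GroupInviteToken is imported from, since invite tokens are now owned by the household models rather than the group models. The surrounding usage stays a plain SQLAlchemy delete; a minimal, illustrative sketch under that assumption (the filter and some_group_id are placeholders, not the task's real query):

from sqlalchemy import delete

from mealie.db.db_setup import session_context
from mealie.db.models.household import GroupInviteToken  # was mealie.db.models.group

with session_context() as session:
    # e.g. drop all outstanding invite tokens for one group (illustrative filter)
    session.execute(delete(GroupInviteToken).where(GroupInviteToken.group_id == some_group_id))
    session.commit()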