mirror of https://github.com/mealie-recipes/mealie.git
	feat: timeline event for mealplans (#2050)
* added related user to mealplans
* made timeline event message actually optional
* added task to create events for mealplan recipes
* replaced fk constraint ops with bulk ops
* fixed event creation and adjusted query range
* indentation is hard
* added missing recipe id query filter
* added tests
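At its core, this change adds a scheduled task, create_mealplan_timeline_events, that scans today's meal plans and records a "<user> made this ..." timeline event on each planned recipe, bumping the recipe's last_made date along the way. As a quick orientation before the diff, here is a minimal sketch (not part of the commit) of calling the task directly, using the signature from the new module added below; the group id is a placeholder, not a value from this change:

from uuid import UUID

from mealie.services.scheduler.tasks.create_timeline_events import (
    create_mealplan_timeline_events,
)

# Scan every group's mealplans for today (this is what the scheduled run does).
create_mealplan_timeline_events()

# Or limit the scan to a single group, e.g. for a targeted run in a test.
create_mealplan_timeline_events(group_id=UUID("00000000-0000-0000-0000-000000000000"))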
@@ -0,0 +1,43 @@
+"""add related user to mealplan
+
+Revision ID: 165d943c64ee
+Revises: 167eb69066ad
+Create Date: 2023-01-21 16:54:44.368768
+
+"""
+import sqlalchemy as sa
+
+import mealie.db.migration_types
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "165d943c64ee"
+down_revision = "167eb69066ad"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("group_meal_plans", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("user_id", mealie.db.migration_types.GUID(), nullable=True))
+        batch_op.create_index(batch_op.f("ix_group_meal_plans_user_id"), ["user_id"], unique=False)
+        batch_op.create_foreign_key("fk_user_mealplans", "users", ["user_id"], ["id"])
+
+    with op.batch_alter_table("shopping_list_item_recipe_reference", schema=None) as batch_op:
+        batch_op.alter_column("recipe_scale", existing_type=sa.FLOAT(), nullable=False)
+
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("shopping_list_item_recipe_reference", schema=None) as batch_op:
+        batch_op.alter_column("recipe_scale", existing_type=sa.FLOAT(), nullable=True)
+
+    with op.batch_alter_table("group_meal_plans", schema=None) as batch_op:
+        batch_op.drop_constraint("fk_user_mealplans", type_="foreignkey")
+        batch_op.drop_index(batch_op.f("ix_group_meal_plans_user_id"))
+        batch_op.drop_column("user_id")
+
+    # ### end Alembic commands ###
@@ -56,6 +56,7 @@ async def start_scheduler():
         tasks.purge_group_registration,
         tasks.purge_password_reset_tokens,
         tasks.purge_group_data_exports,
+        tasks.create_mealplan_timeline_events,
     )
 
     SchedulerRegistry.register_minutely(
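The hunk above shows only the argument list of the registration call; assuming the enclosing call is SchedulerRegistry.register_daily (the daily counterpart of the register_minutely call visible just below it), the new task is wired up alongside the existing daily jobs roughly like this (list truncated to the entries shown in the hunk):

SchedulerRegistry.register_daily(
    tasks.purge_group_registration,
    tasks.purge_password_reset_tokens,
    tasks.purge_group_data_exports,
    tasks.create_mealplan_timeline_events,
)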
@@ -14,6 +14,7 @@ if TYPE_CHECKING:
     from group import Group
 
     from ..recipe import RecipeModel
+    from ..users import User
 
 
 class GroupMealPlanRules(BaseMixins, SqlAlchemyBase):
@@ -47,6 +48,8 @@ class GroupMealPlan(SqlAlchemyBase, BaseMixins):
 
     group_id: Mapped[GUID | None] = mapped_column(GUID, ForeignKey("groups.id"), index=True)
     group: Mapped[Optional["Group"]] = orm.relationship("Group", back_populates="mealplans")
+    user_id: Mapped[GUID | None] = mapped_column(GUID, ForeignKey("users.id"), index=True)
+    user: Mapped[Optional["User"]] = orm.relationship("User", back_populates="mealplans")
 
     recipe_id: Mapped[GUID | None] = mapped_column(GUID, ForeignKey("recipes.id"), index=True)
     recipe: Mapped[Optional["RecipeModel"]] = orm.relationship(
@@ -13,6 +13,7 @@ from .user_to_favorite import users_to_favorites
 
 if TYPE_CHECKING:
     from ..group import Group
+    from ..group.mealplan import GroupMealPlan
     from ..recipe import RecipeComment, RecipeModel, RecipeTimelineEvent
     from .password_reset import PasswordResetModel
 
@@ -68,6 +69,9 @@ class User(SqlAlchemyBase, BaseMixins):
     owned_recipes: Mapped[Optional["RecipeModel"]] = orm.relationship(
         "RecipeModel", single_parent=True, foreign_keys=[owned_recipes_id]
     )
+    mealplans: Mapped[Optional["GroupMealPlan"]] = orm.relationship(
+        "GroupMealPlan", order_by="GroupMealPlan.date", **sp_args
+    )
 
     favorite_recipes: Mapped[list["RecipeModel"]] = orm.relationship(
         "RecipeModel", secondary=users_to_favorites, back_populates="favorited_by"
@@ -83,7 +83,13 @@ class GroupMealplanController(BaseCrudController):
         try:
             recipe = random_recipes[0]
             return self.mixins.create_one(
-                SavePlanEntry(date=data.date, entry_type=data.entry_type, recipe_id=recipe.id, group_id=self.group_id)
+                SavePlanEntry(
+                    date=data.date,
+                    entry_type=data.entry_type,
+                    recipe_id=recipe.id,
+                    group_id=self.group_id,
+                    user_id=self.user.id,
+                )
             )
         except IndexError as e:
             raise HTTPException(
@@ -118,7 +124,7 @@ class GroupMealplanController(BaseCrudController):
 
     @router.post("", response_model=ReadPlanEntry, status_code=201)
     def create_one(self, data: CreatePlanEntry):
-        data = mapper.cast(data, SavePlanEntry, group_id=self.group.id)
+        data = mapper.cast(data, SavePlanEntry, group_id=self.group.id, user_id=self.user.id)
         result = self.mixins.create_one(data)
 
         self.publish_event(
@@ -40,10 +40,12 @@ class CreatePlanEntry(MealieModel):
 class UpdatePlanEntry(CreatePlanEntry):
     id: int
     group_id: UUID
+    user_id: UUID | None
 
 
 class SavePlanEntry(CreatePlanEntry):
     group_id: UUID
+    user_id: UUID | None
 
     class Config:
         orm_mode = True
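Both plan-entry schemas gain a nullable user_id, mirroring the nullable column created in the migration above, so plans written before this change (or without an owner) still validate. A small sketch of the intent, assuming SavePlanEntry lives alongside CreatePlanEntry in mealie.schema.meal_plan.new_meal and that Pydantic v1's implicit None default for un-assigned Optional fields applies; the UUIDs are placeholders:

from datetime import date
from uuid import uuid4

from mealie.schema.meal_plan.new_meal import SavePlanEntry

# user_id omitted -> still valid, user_id defaults to None (legacy / ownerless plans)
entry = SavePlanEntry(date=date.today(), entry_type="dinner", recipe_id=uuid4(), group_id=uuid4())
print(entry.user_id)  # None

# user_id supplied -> carried through so the timeline task can name who planned the meal
entry = SavePlanEntry(
    date=date.today(), entry_type="dinner", recipe_id=uuid4(), group_id=uuid4(), user_id=uuid4()
)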
@@ -20,7 +20,7 @@ class RecipeTimelineEventIn(MealieModel):
     subject: str
     event_type: TimelineEventType
 
-    message: str | None = Field(alias="eventMessage")
+    message: str | None = Field(None, alias="eventMessage")
     image: str | None = None
 
     timestamp: datetime = datetime.now()
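This one-line change is the "made timeline event message actually optional" item from the commit message: passing None as Field's first (default) argument lets the message be omitted while the eventMessage alias keeps working. A minimal stand-alone sketch with an illustrative model (not the real RecipeTimelineEventIn):

from pydantic import BaseModel, Field


class EventIn(BaseModel):
    subject: str
    message: str | None = Field(None, alias="eventMessage")


print(EventIn(subject="made this"))                          # message defaults to None
print(EventIn(subject="made this", eventMessage="so good"))  # populated via the alias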
| @@ -1,3 +1,4 @@ | |||||||
|  | from .create_timeline_events import create_mealplan_timeline_events | ||||||
| from .post_webhooks import post_group_webhooks | from .post_webhooks import post_group_webhooks | ||||||
| from .purge_group_exports import purge_group_data_exports | from .purge_group_exports import purge_group_data_exports | ||||||
| from .purge_password_reset import purge_password_reset_tokens | from .purge_password_reset import purge_password_reset_tokens | ||||||
| @@ -5,6 +6,7 @@ from .purge_registration import purge_group_registration | |||||||
| from .reset_locked_users import locked_user_reset | from .reset_locked_users import locked_user_reset | ||||||
|  |  | ||||||
| __all__ = [ | __all__ = [ | ||||||
|  |     "create_mealplan_timeline_events", | ||||||
|     "post_group_webhooks", |     "post_group_webhooks", | ||||||
|     "purge_password_reset_tokens", |     "purge_password_reset_tokens", | ||||||
|     "purge_group_data_exports", |     "purge_group_data_exports", | ||||||
|   | |||||||
							
								
								
									
										120
									
								
mealie/services/scheduler/tasks/create_timeline_events.py (new file, 120 lines)
@@ -0,0 +1,120 @@
+from datetime import datetime, time, timedelta, timezone
+
+from pydantic import UUID4
+
+from mealie.db.db_setup import session_context
+from mealie.repos.all_repositories import get_repositories
+from mealie.schema.meal_plan.new_meal import PlanEntryType
+from mealie.schema.recipe.recipe import Recipe, RecipeSummary
+from mealie.schema.recipe.recipe_timeline_events import (
+    RecipeTimelineEventCreate,
+    TimelineEventType,
+)
+from mealie.schema.response.pagination import PaginationQuery
+from mealie.schema.user.user import DEFAULT_INTEGRATION_ID
+from mealie.services.event_bus_service.event_bus_service import EventBusService
+from mealie.services.event_bus_service.event_types import (
+    EventOperation,
+    EventRecipeData,
+    EventRecipeTimelineEventData,
+    EventTypes,
+)
+
+
+def create_mealplan_timeline_events(group_id: UUID4 | None = None):
+    event_time = datetime.now(timezone.utc)
+
+    with session_context() as session:
+        repos = get_repositories(session)
+        event_bus_service = EventBusService(session=session, group_id=group_id)
+
+        timeline_events_to_create: list[RecipeTimelineEventCreate] = []
+        recipes_to_update: dict[UUID4, RecipeSummary] = {}
+        recipe_id_to_slug_map: dict[UUID4, str] = {}
+
+        if group_id is None:
+            # if not specified, we check all groups
+            groups_data = repos.groups.page_all(PaginationQuery(page=1, per_page=-1))
+            group_ids = [group.id for group in groups_data.items]
+
+        else:
+            group_ids = [group_id]
+
+        for group_id in group_ids:
+            mealplans = repos.meals.get_today(group_id)
+            for mealplan in mealplans:
+                if not (mealplan.recipe and mealplan.user_id):
+                    continue
+
+                user = repos.users.get_one(mealplan.user_id)
+                if not user:
+                    continue
+
+                # TODO: make this translatable
+                if mealplan.entry_type == PlanEntryType.side:
+                    event_subject = f"{user.full_name} made this as a side"
+
+                else:
+                    event_subject = f"{user.full_name} made this for {mealplan.entry_type.value}"
+
+                query_start_time = datetime.combine(datetime.now(timezone.utc).date(), time.min)
+                query_end_time = query_start_time + timedelta(days=1)
+                query = PaginationQuery(
+                    query_filter=(
+                        f'recipe_id = "{mealplan.recipe_id}" '
+                        f'AND timestamp >= "{query_start_time.isoformat()}" '
+                        f'AND timestamp < "{query_end_time.isoformat()}" '
+                        f'AND subject = "{event_subject}"'
+                    )
+                )
+
+                # if this event already exists, don't create it again
+                events = repos.recipe_timeline_events.page_all(pagination=query)
+                if events.items:
+                    continue
+
+                # bump up the last made date
+                last_made = mealplan.recipe.last_made
+                if (
+                    not last_made or last_made.date() < event_time.date()
+                ) and mealplan.recipe_id not in recipes_to_update:
+                    recipes_to_update[mealplan.recipe_id] = mealplan.recipe
+
+                timeline_events_to_create.append(
+                    RecipeTimelineEventCreate(
+                        user_id=user.id,
+                        subject=event_subject,
+                        event_type=TimelineEventType.info,
+                        timestamp=event_time,
+                        recipe_id=mealplan.recipe_id,
+                    )
+                )
+
+                recipe_id_to_slug_map[mealplan.recipe_id] = mealplan.recipe.slug
+
+        if not timeline_events_to_create:
+            return
+
+        # TODO: use bulk operations
+        for event in timeline_events_to_create:
+            new_event = repos.recipe_timeline_events.create(event)
+            event_bus_service.dispatch(
+                integration_id=DEFAULT_INTEGRATION_ID,
+                group_id=group_id,  # type: ignore
+                event_type=EventTypes.recipe_updated,
+                document_data=EventRecipeTimelineEventData(
+                    operation=EventOperation.create,
+                    recipe_slug=recipe_id_to_slug_map[new_event.recipe_id],
+                    recipe_timeline_event_id=new_event.id,
+                ),
+            )
+
+        for recipe in recipes_to_update.values():
+            recipe.last_made = event_time
+            repos.recipes.update(recipe.slug, recipe.cast(Recipe))
+            event_bus_service.dispatch(
+                integration_id=DEFAULT_INTEGRATION_ID,
+                group_id=group_id,  # type: ignore
+                event_type=EventTypes.recipe_updated,
+                document_data=EventRecipeData(operation=EventOperation.update, recipe_slug=recipe.slug),
+            )
@@ -4,7 +4,7 @@ from mealie.core.config import get_app_settings
 from mealie.services.backups_v2.alchemy_exporter import AlchemyExporter
 
 ALEMBIC_VERSIONS = [
-    {"version_num": "167eb69066ad"},
+    {"version_num": "165d943c64ee"},
 ]
 
 
@@ -0,0 +1,186 @@
+from datetime import date, datetime, timedelta
+
+from fastapi.testclient import TestClient
+from pydantic import UUID4
+
+from mealie.schema.meal_plan.new_meal import CreatePlanEntry
+from mealie.schema.recipe.recipe import RecipeSummary
+from mealie.services.scheduler.tasks.create_timeline_events import (
+    create_mealplan_timeline_events,
+)
+from tests import utils
+from tests.utils import api_routes
+from tests.utils.factories import random_int, random_string
+from tests.utils.fixture_schemas import TestUser
+
+
+def test_no_mealplans():
+    # make sure this task runs successfully even if it doesn't do anything
+    create_mealplan_timeline_events()
+
+
+def test_new_mealplan_event(api_client: TestClient, unique_user: TestUser):
+    recipe_name = random_string(length=25)
+    response = api_client.post(api_routes.recipes, json={"name": recipe_name}, headers=unique_user.token)
+    assert response.status_code == 201
+
+    response = api_client.get(api_routes.recipes_slug(recipe_name), headers=unique_user.token)
+    recipe = RecipeSummary.parse_obj(response.json())
+    recipe_id = recipe.id
+    assert recipe.last_made is None
+
+    # store the number of events, so we can compare later
+    response = api_client.get(api_routes.recipes_slug_timeline_events(recipe_name), headers=unique_user.token)
+    response_json = response.json()
+    initial_event_count = len(response_json["items"])
+
+    new_plan = CreatePlanEntry(date=date.today(), entry_type="dinner", recipe_id=recipe_id).dict(by_alias=True)
+    new_plan["date"] = date.today().isoformat()
+    new_plan["recipeId"] = str(recipe_id)
+
+    response = api_client.post(api_routes.groups_mealplans, json=new_plan, headers=unique_user.token)
+    assert response.status_code == 201
+
+    # run the task and check to make sure a new event was created from the mealplan
+    create_mealplan_timeline_events()
+
+    params = {"page": "1", "perPage": "-1", "orderBy": "created_at", "orderDirection": "desc"}
+    response = api_client.get(
+        api_routes.recipes_slug_timeline_events(recipe_name), headers=unique_user.token, params=params
+    )
+    response_json = response.json()
+    assert len(response_json["items"]) == initial_event_count + 1
+
+    # make sure the mealplan entry type is in the subject
+    event = response_json["items"][0]
+    assert new_plan["entryType"].lower() in event["subject"].lower()
+
+    # make sure the recipe's last made date was updated
+    response = api_client.get(api_routes.recipes_slug(recipe_name), headers=unique_user.token)
+    recipe = RecipeSummary.parse_obj(response.json())
+    assert recipe.last_made.date() == date.today()  # type: ignore
+
+
+def test_new_mealplan_event_duplicates(api_client: TestClient, unique_user: TestUser):
+    recipe_name = random_string(length=25)
+    response = api_client.post(api_routes.recipes, json={"name": recipe_name}, headers=unique_user.token)
+    assert response.status_code == 201
+
+    response = api_client.get(api_routes.recipes_slug(recipe_name), headers=unique_user.token)
+    recipe = RecipeSummary.parse_obj(response.json())
+    recipe_id = recipe.id
+
+    # store the number of events, so we can compare later
+    response = api_client.get(api_routes.recipes_slug_timeline_events(recipe_name), headers=unique_user.token)
+    response_json = response.json()
+    initial_event_count = len(response_json["items"])
+
+    new_plan = CreatePlanEntry(date=date.today(), entry_type="dinner", recipe_id=recipe_id).dict(by_alias=True)
+    new_plan["date"] = date.today().isoformat()
+    new_plan["recipeId"] = str(recipe_id)
+
+    response = api_client.post(api_routes.groups_mealplans, json=new_plan, headers=unique_user.token)
+    assert response.status_code == 201
+
+    # run the task multiple times and make sure we only create one event
+    for _ in range(3):
+        create_mealplan_timeline_events()
+
+    params = {"page": "1", "perPage": "-1", "orderBy": "created_at", "orderDirection": "desc"}
+    response = api_client.get(
+        api_routes.recipes_slug_timeline_events(recipe_name), headers=unique_user.token, params=params
+    )
+    response_json = response.json()
+    assert len(response_json["items"]) == initial_event_count + 1
+
+
+def test_new_mealplan_events_with_multiple_recipes(api_client: TestClient, unique_user: TestUser):
+    recipes: list[RecipeSummary] = []
+    for _ in range(3):
+        recipe_name = random_string(length=25)
+        response = api_client.post(api_routes.recipes, json={"name": recipe_name}, headers=unique_user.token)
+        assert response.status_code == 201
+
+        response = api_client.get(api_routes.recipes_slug(recipe_name), headers=unique_user.token)
+        recipes.append(RecipeSummary.parse_obj(response.json()))
+
+    # store the number of events, so we can compare later
+    response = api_client.get(api_routes.recipes_slug_timeline_events(str(recipes[0].slug)), headers=unique_user.token)
+    response_json = response.json()
+    initial_event_count = len(response_json["items"])
+
+    # create a few mealplans for each recipe
+    mealplan_count_by_recipe_id: dict[UUID4, int] = {}
+    for recipe in recipes:
+        mealplan_count_by_recipe_id[recipe.id] = 0  # type: ignore
+        for _ in range(random_int(1, 5)):
+            new_plan = CreatePlanEntry(date=date.today(), entry_type="dinner", recipe_id=str(recipe.id)).dict(
+                by_alias=True
+            )
+            new_plan["date"] = date.today().isoformat()
+            new_plan["recipeId"] = str(recipe.id)
+
+            response = api_client.post(api_routes.groups_mealplans, json=new_plan, headers=unique_user.token)
+            assert response.status_code == 201
+            mealplan_count_by_recipe_id[recipe.id] += 1  # type: ignore
+
+    # run the task once and make sure the event counts are correct
+    create_mealplan_timeline_events()
+
+    for recipe in recipes:
+        target_count = initial_event_count + mealplan_count_by_recipe_id[recipe.id]  # type: ignore
+        params = {"page": "1", "perPage": "-1", "orderBy": "created_at", "orderDirection": "desc"}
+        response = api_client.get(
+            api_routes.recipes_slug_timeline_events(recipe.slug), headers=unique_user.token, params=params
+        )
+        response_json = response.json()
+        assert len(response_json["items"]) == target_count
+
+    # run the task a few more times and confirm the counts are the same
+    for _ in range(3):
+        create_mealplan_timeline_events()
+
+    for recipe in recipes:
+        target_count = initial_event_count + mealplan_count_by_recipe_id[recipe.id]  # type: ignore
+        params = {
+            "page": "1",
+            "perPage": "-1",
+            "orderBy": "created_at",
+            "orderDirection": "desc",
+        }
+        response = api_client.get(
+            api_routes.recipes_slug_timeline_events(recipe.slug), headers=unique_user.token, params=params
+        )
+        response_json = response.json()
+        assert len(response_json["items"]) == target_count
+
+
+def test_preserve_future_made_date(api_client: TestClient, unique_user: TestUser):
+    recipe_name = random_string(length=25)
+    response = api_client.post(api_routes.recipes, json={"name": recipe_name}, headers=unique_user.token)
+    assert response.status_code == 201
+
+    response = api_client.get(api_routes.recipes_slug(recipe_name), headers=unique_user.token)
+    recipe = RecipeSummary.parse_obj(response.json())
+    recipe_id = str(recipe.id)
+
+    future_dt = datetime.now() + timedelta(days=random_int(1, 10))
+    recipe.last_made = future_dt
+    response = api_client.put(
+        api_routes.recipes_slug(recipe.slug), json=utils.jsonify(recipe), headers=unique_user.token
+    )
+    assert response.status_code == 200
+
+    new_plan = CreatePlanEntry(date=date.today(), entry_type="dinner", recipe_id=recipe_id).dict(by_alias=True)
+    new_plan["date"] = date.today().isoformat()
+    new_plan["recipeId"] = str(recipe_id)
+
+    response = api_client.post(api_routes.groups_mealplans, json=new_plan, headers=unique_user.token)
+    assert response.status_code == 201
+
+    # run the task and make sure the recipe's last made date was not updated
+    create_mealplan_timeline_events()
+
+    response = api_client.get(api_routes.recipes_slug(recipe_name), headers=unique_user.token)
+    recipe = RecipeSummary.parse_obj(response.json())
+    assert recipe.last_made == future_dt