fix: Lint Python code with ruff (#3799)
--- a/tests/fixtures/fixture_users.py
+++ b/tests/fixtures/fixture_users.py
@@ -1,5 +1,5 @@
 import json
-from typing import Generator
+from collections.abc import Generator

 from pytest import fixture
 from starlette.testclient import TestClient

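The typing.Generator to collections.abc.Generator swaps that repeat throughout this commit correspond to ruff's UP035 rule: the typing aliases for container ABCs are deprecated, and the collections.abc versions have been subscriptable since Python 3.9. A minimal sketch of the resulting fixture style (the fixture name and value here are illustrative, not from this repo):

    from collections.abc import Generator

    import pytest


    @pytest.fixture()
    def sample_value() -> Generator[str, None, None]:
        # setup runs before the yield, teardown after it
        value = "example"
        yield value
        # teardown cleanup would go here
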
@@ -26,7 +26,7 @@ def test_public_about_get_app_info(api_client: TestClient, is_private_group: boo
     assert as_dict["allowSignup"] == settings.ALLOW_SIGNUP

     if is_private_group:
-        assert as_dict["defaultGroupSlug"] == None
+        assert as_dict["defaultGroupSlug"] is None
     else:
         assert as_dict["defaultGroupSlug"] == group.slug

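Changing `== None` to `is None` is the standard PEP 8 fix (ruff E711): `None` is a singleton, and `==` can be overridden by a class's `__eq__`, so identity comparison is the reliable check. A self-contained illustration:

    class AlwaysEqual:
        # a type whose __eq__ makes equality checks against None misleading
        def __eq__(self, other):
            return True


    obj = AlwaysEqual()
    print(obj == None)  # noqa: E711 (prints True despite obj not being None)
    print(obj is None)  # prints False: identity comparison is unambiguous
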
@@ -104,7 +104,7 @@ def test_bad_mealie_alpha_data_is_ignored(api_client: TestClient, unique_user: T
             with open(invalid_json_path, "w"):
                 pass  # write nothing to the file, which is invalid JSON
         except Exception:
-            raise Exception(os.listdir(tmpdir))
+            raise Exception(os.listdir(tmpdir))  # noqa: B904

         modified_test_data = os.path.join(tmpdir, "modified-test-data.zip")
         with ZipFile(modified_test_data, "w") as zf:

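The `# noqa: B904` here suppresses, rather than fixes, ruff's B904, which wants a `raise` inside an `except` block to chain exceptions with `raise ... from err` (or `from None`). The non-suppressed fix usually looks like this (a sketch with hypothetical names, not code from this commit):

    import json


    def load_config(path: str) -> dict:
        try:
            with open(path) as f:
                return json.load(f)
        except FileNotFoundError as err:
            # `from err` attaches the original traceback as __cause__,
            # which is exactly what B904 asks for
            raise RuntimeError(f"config missing: {path}") from err
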
@@ -49,7 +49,7 @@ def test_group_recipe_actions_get_all(api_client: TestClient, unique_user: TestU

     response = api_client.get(api_routes.groups_recipe_actions, headers=unique_user.token)
     data = assert_deserialize(response, 200)
-    fetched_ids = set(item["id"] for item in data["items"])
+    fetched_ids = {item["id"] for item in data["items"]}
     for expected_id in expected_ids:
         assert expected_id in fetched_ids

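Rewriting `set(expr for x in xs)` as `{expr for x in xs}` is ruff's C401 (unnecessary generator): the comprehension skips the intermediate generator fed to the `set()` constructor. The many `unit_ids`, `fetched_recipe_ids`, and `response_user_ids` changes below are the same fix. In isolation:

    data = {"items": [{"id": 1}, {"id": 2}, {"id": 2}]}

    # before: generator expression passed to the set() constructor
    fetched_ids = set(item["id"] for item in data["items"])  # noqa: C401

    # after: the equivalent set comprehension
    fetched_ids = {item["id"] for item in data["items"]}

    assert fetched_ids == {1, 2}
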
@@ -1,5 +1,5 @@
+from collections.abc import Generator
 from pathlib import Path
-from typing import Generator

 import pytest
 import sqlalchemy

@@ -4,8 +4,8 @@ import os
 import random
 import shutil
 import tempfile
+from collections.abc import Generator
 from pathlib import Path
-from typing import Generator
 from uuid import uuid4
 from zipfile import ZipFile

@@ -489,9 +489,9 @@ def test_duplicate(api_client: TestClient, recipe_data: RecipeSiteTestCase, uniq

     # Ingredients should have the same texts, but different ids
     assert duplicate_recipe["recipeIngredient"] != initial_recipe["recipeIngredient"]
-    assert list(map(lambda i: i["note"], duplicate_recipe["recipeIngredient"])) == list(
-        map(lambda i: i["note"], initial_recipe["recipeIngredient"])
-    )
+    assert [i["note"] for i in duplicate_recipe["recipeIngredient"]] == [
+        i["note"] for i in initial_recipe["recipeIngredient"]
+    ]

     previous_categories = initial_recipe["recipeCategory"]
     assert duplicate_recipe["recipeCategory"] == previous_categories

@@ -748,21 +748,21 @@ def test_get_recipes_organizer_filter(
     # get recipes by organizer
     if organizer_type == "tags":
         organizer = random.choice(tags)
-        expected_recipe_ids = set(
+        expected_recipe_ids = {
             str(recipe.id) for recipe in recipes if organizer.id in [tag.id for tag in recipe.tags or []]
-        )
+        }
     elif organizer_type == "categories":
         organizer = random.choice(categories)
-        expected_recipe_ids = set(
+        expected_recipe_ids = {
             str(recipe.id)
             for recipe in recipes
             if organizer.id in [category.id for category in recipe.recipe_category or []]
-        )
+        }
     elif organizer_type == "tools":
         organizer = random.choice(tools)
-        expected_recipe_ids = set(
+        expected_recipe_ids = {
             str(recipe.id) for recipe in recipes if organizer.id in [tool.id for tool in recipe.tools or []]
-        )
+        }
     else:
         raise ValueError(f"Unknown organizer type: {organizer_type}")

@@ -1,6 +1,6 @@
-from io import BytesIO
 import json
 import zipfile
+from io import BytesIO

 from fastapi.testclient import TestClient

@@ -29,7 +29,7 @@ def test_recipe_ingredients_parser_nlp(api_client: TestClient, unique_user: Test
     response = api_client.post(api_routes.parser_ingredients, json=payload, headers=unique_user.token)
     assert response.status_code == 200

-    for api_ingredient, test_ingredient in zip(response.json(), test_ingredients):
+    for api_ingredient, test_ingredient in zip(response.json(), test_ingredients, strict=False):
         assert_ingredient(api_ingredient, test_ingredient)

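Adding `strict=False` to `zip()` answers ruff's B905: bare `zip()` silently truncates at the shortest iterable, so the rule asks callers to state that behavior explicitly (the keyword exists on Python 3.10+). A quick demonstration:

    a = [1, 2, 3]
    b = ["x", "y"]

    # strict=False keeps the default truncating behavior, but now on purpose
    assert list(zip(a, b, strict=False)) == [(1, "x"), (2, "y")]

    # strict=True instead raises on a length mismatch, catching bugs early
    try:
        list(zip(a, b, strict=True))
    except ValueError as err:
        print(err)
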
@@ -1,5 +1,5 @@
 import random
-from typing import Generator
+from collections.abc import Generator
 from uuid import UUID

 import pytest

@@ -71,8 +71,8 @@ def test_user_recipe_favorites(
     ratings = response.json()["ratings"]

     assert len(ratings) == len(recipes_to_favorite)
-    fetched_recipe_ids = set(rating["recipeId"] for rating in ratings)
-    favorited_recipe_ids = set(str(recipe.id) for recipe in recipes_to_favorite)
+    fetched_recipe_ids = {rating["recipeId"] for rating in ratings}
+    favorited_recipe_ids = {str(recipe.id) for recipe in recipes_to_favorite}
     assert fetched_recipe_ids == favorited_recipe_ids

     # remove favorites

@@ -87,8 +87,8 @@ def test_user_recipe_favorites(
     ratings = response.json()["ratings"]

     assert len(ratings) == len(recipes_to_favorite) - len(recipe_favorites_to_remove)
-    fetched_recipe_ids = set(rating["recipeId"] for rating in ratings)
-    removed_recipe_ids = set(str(recipe.id) for recipe in recipe_favorites_to_remove)
+    fetched_recipe_ids = {rating["recipeId"] for rating in ratings}
+    removed_recipe_ids = {str(recipe.id) for recipe in recipe_favorites_to_remove}
     assert fetched_recipe_ids == favorited_recipe_ids - removed_recipe_ids

@@ -1,4 +1,4 @@
-from typing import Generator
+from collections.abc import Generator

 import pytest
 import sqlalchemy

@@ -40,7 +40,7 @@ def test_get_all_users_admin(
     assert response.status_code == 200

     # assert all users from all groups are returned
-    response_user_ids = set(user["id"] for user in response.json()["items"])
+    response_user_ids = {user["id"] for user in response.json()["items"]}
     for user_id in user_ids:
         assert user_id in response_user_ids

@@ -73,7 +73,7 @@ def test_get_all_group_users(

     user_group = database.groups.get_by_slug_or_id(user.group_id)
     assert user_group
-    same_group_user_ids: set[str] = set([str(user.user_id)])
+    same_group_user_ids: set[str] = {user.user_id}
     for _ in range(random_int(2, 5)):
         new_user = database.users.create(
             {

@@ -89,7 +89,7 @@ def test_get_all_group_users(

     response = api_client.get(api_routes.users_group_users, params={"perPage": -1}, headers=user.token)
     assert response.status_code == 200
-    response_user_ids = set(user["id"] for user in response.json()["items"])
+    response_user_ids = {user["id"] for user in response.json()["items"]}

     # assert only users from the same group are returned
     for user_id in other_group_user_ids:

@@ -234,7 +234,10 @@ def test_ldap_user_login_simple_filter(api_client: TestClient):
 @pytest.mark.skipif(not os.environ.get("GITHUB_ACTIONS", False), reason="requires ldap service in github actions")
 def test_ldap_user_login_complex_filter(api_client: TestClient):
     settings = get_app_settings()
-    settings.LDAP_USER_FILTER = "(&(objectClass=inetOrgPerson)(|(memberOf=cn=ship_crew,ou=people,dc=planetexpress,dc=com)(memberOf=cn=admin_staff,ou=people,dc=planetexpress,dc=com)))"
+    settings.LDAP_USER_FILTER = (
+        "(&(objectClass=inetOrgPerson)(|(memberOf=cn=ship_crew,ou=people,dc=planetexpress,dc=com)"
+        "(memberOf=cn=admin_staff,ou=people,dc=planetexpress,dc=com)))"
+    )

     form_data = {"username": "professor", "password": "professor"}
     response = api_client.post(api_routes.auth_token, data=form_data)

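Splitting the long LDAP filter into adjacent string literals inside parentheses is the usual fix for ruff's line-length rule (E501): Python concatenates adjacent literals at compile time, so the resulting value is unchanged. For example:

    part_a = "(&(objectClass=inetOrgPerson)(|(memberOf=cn=ship_crew,ou=people,dc=planetexpress,dc=com)"
    part_b = "(memberOf=cn=admin_staff,ou=people,dc=planetexpress,dc=com)))"

    # adjacent literals in parentheses compile to one string: the split
    # form equals the original single-line literal
    split_form = (
        "(&(objectClass=inetOrgPerson)(|(memberOf=cn=ship_crew,ou=people,dc=planetexpress,dc=com)"
        "(memberOf=cn=admin_staff,ou=people,dc=planetexpress,dc=com)))"
    )
    assert split_form == part_a + part_b
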
@@ -3,14 +3,11 @@ import json
 import pytest
 from fastapi.testclient import TestClient

 from mealie.core.config import get_app_settings
 from mealie.db.db_setup import session_context
-from mealie.repos.repository_factory import AllRepositories
-from mealie.schema.response.pagination import PaginationQuery
-from mealie.schema.user.user import ChangePassword, PrivateUser
+from mealie.schema.user.user import PrivateUser
 from mealie.services.user_services.password_reset_service import PasswordResetService
 from tests.utils import api_routes
-from tests.utils.factories import random_email, random_string
+from tests.utils.factories import random_string
 from tests.utils.fixture_schemas import TestUser

@@ -27,7 +24,7 @@ def test_password_reset(api_client: TestClient, unique_user: TestUser, casing: s
             cased_email += letter.upper()
         else:
             cased_email += letter.lower()
-    cased_email
+    assert cased_email

     with session_context() as session:
         service = PasswordResetService(session)

@@ -75,7 +72,7 @@ def test_password_reset_ldap(ldap_user: PrivateUser, casing: str):
             cased_email += letter.upper()
         else:
             cased_email += letter.lower()
-    cased_email
+    assert cased_email

     with session_context() as session:
         service = PasswordResetService(session)

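The bare `cased_email` statement in both tests was a no-op that ruff flags as B018 (useless expression); `assert cased_email` keeps a check at that spot but actually verifies the loop built a non-empty string. In miniature:

    cased_email = ""
    for i, letter in enumerate("user@example.com"):
        cased_email += letter.upper() if i % 2 else letter.lower()

    assert cased_email  # fails loudly if the string were empty
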
@@ -24,7 +24,7 @@ class ABCMultiTenantTestCase(ABC):
     @abstractmethod
     def cleanup(self) -> None: ...

-    def __enter__(self):
+    def __enter__(self):  # noqa: B027
         pass

     def __exit__(self, exc_type, exc_val, exc_tb):

@@ -65,8 +65,8 @@ def test_multitenant_cases_same_named_resources(
 ):
     """
     This test is used to ensure that the same resource can be created with the same values in different tenants.
-    i.e. the same category can exist in multiple groups. This is important to validate that the compound unique constraints
-    are operating in SQLAlchemy correctly.
+    i.e. the same category can exist in multiple groups. This is important to validate that the compound unique
+    constraints are operating in SQLAlchemy correctly.
     """
     user1 = multitenants.user_one
     user2 = multitenants.user_two

@@ -308,7 +308,7 @@ def test_pagination_filter_in_advanced(database: AllRepositories, unique_user: T
         TagSave(group_id=unique_user.group_id, name=slug2, slug=slug2),
     ]

-    tag_1, tag_2 = [database.tags.create(tag) for tag in tags]
+    tag_1, tag_2 = (database.tags.create(tag) for tag in tags)

     # Bootstrap the database with recipes
     slug = random_string()

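Unpacking from a generator expression instead of a list comprehension avoids materializing a throwaway list; tuple unpacking consumes the generator immediately either way (this appears to match ruff's UP027 as it existed at the time). Schematically:

    def create(name: str) -> str:
        return name.title()


    names = ["first tag", "second tag"]

    # before: builds a list only to unpack and discard it
    tag_1, tag_2 = [create(n) for n in names]

    # after: the unpacking drives the generator directly
    tag_1, tag_2 = (create(n) for n in names)

    assert (tag_1, tag_2) == ("First Tag", "Second Tag")
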
@@ -472,7 +472,7 @@ def test_pagination_filter_logical_namespace_conflict(database: AllRepositories,
         CategorySave(group_id=unique_user.group_id, name=random_string(10)),
         CategorySave(group_id=unique_user.group_id, name=random_string(10)),
     ]
-    category_1, category_2 = [database.categories.create(category) for category in categories]
+    category_1, category_2 = (database.categories.create(category) for category in categories)

     # Bootstrap the database with recipes
     slug = random_string()

@@ -528,7 +528,7 @@ def test_pagination_filter_datetimes(
     dt = past_dt.isoformat()
     query = PaginationQuery(page=1, per_page=-1, query_filter=f'createdAt>"{dt}"')
     unit_results = units_repo.page_all(query).items
-    unit_ids = set(unit.id for unit in unit_results)
+    unit_ids = {unit.id for unit in unit_results}
     assert len(unit_ids) == 3
     assert unit_1.id in unit_ids
     assert unit_2.id in unit_ids

@@ -537,7 +537,7 @@ def test_pagination_filter_datetimes(
     dt = unit_1.created_at.isoformat()  # type: ignore
     query = PaginationQuery(page=1, per_page=-1, query_filter=f'createdAt>"{dt}"')
     unit_results = units_repo.page_all(query).items
-    unit_ids = set(unit.id for unit in unit_results)
+    unit_ids = {unit.id for unit in unit_results}
     assert len(unit_ids) == 2
     assert unit_1.id not in unit_ids
     assert unit_2.id in unit_ids

@@ -546,7 +546,7 @@ def test_pagination_filter_datetimes(
     dt = unit_2.created_at.isoformat()  # type: ignore
     query = PaginationQuery(page=1, per_page=-1, query_filter=f'createdAt>"{dt}"')
     unit_results = units_repo.page_all(query).items
-    unit_ids = set(unit.id for unit in unit_results)
+    unit_ids = {unit.id for unit in unit_results}
     assert len(unit_ids) == 1
     assert unit_1.id not in unit_ids
     assert unit_2.id not in unit_ids

@@ -555,14 +555,14 @@ def test_pagination_filter_datetimes(
     dt = unit_3.created_at.isoformat()  # type: ignore
     query = PaginationQuery(page=1, per_page=-1, query_filter=f'createdAt>"{dt}"')
     unit_results = units_repo.page_all(query).items
-    unit_ids = set(unit.id for unit in unit_results)
+    unit_ids = {unit.id for unit in unit_results}
     assert len(unit_ids) == 0

     future_dt: datetime = unit_3.created_at + timedelta(seconds=1)  # type: ignore
     dt = future_dt.isoformat()
     query = PaginationQuery(page=1, per_page=-1, query_filter=f'createdAt>"{dt}"')
     unit_results = units_repo.page_all(query).items
-    unit_ids = set(unit.id for unit in unit_results)
+    unit_ids = {unit.id for unit in unit_results}
     assert len(unit_ids) == 0

     ## GTE

@@ -570,7 +570,7 @@ def test_pagination_filter_datetimes(
     dt = past_dt.isoformat()
     query = PaginationQuery(page=1, per_page=-1, query_filter=f'createdAt>="{dt}"')
     unit_results = units_repo.page_all(query).items
-    unit_ids = set(unit.id for unit in unit_results)
+    unit_ids = {unit.id for unit in unit_results}
     assert len(unit_ids) == 3
     assert unit_1.id in unit_ids
     assert unit_2.id in unit_ids

@@ -579,7 +579,7 @@ def test_pagination_filter_datetimes(
     dt = unit_1.created_at.isoformat()  # type: ignore
     query = PaginationQuery(page=1, per_page=-1, query_filter=f'createdAt>="{dt}"')
     unit_results = units_repo.page_all(query).items
-    unit_ids = set(unit.id for unit in unit_results)
+    unit_ids = {unit.id for unit in unit_results}
     assert len(unit_ids) == 3
     assert unit_1.id in unit_ids
     assert unit_2.id in unit_ids

@@ -588,7 +588,7 @@ def test_pagination_filter_datetimes(
     dt = unit_2.created_at.isoformat()  # type: ignore
     query = PaginationQuery(page=1, per_page=-1, query_filter=f'createdAt>="{dt}"')
     unit_results = units_repo.page_all(query).items
-    unit_ids = set(unit.id for unit in unit_results)
+    unit_ids = {unit.id for unit in unit_results}
     assert len(unit_ids) == 2
     assert unit_1.id not in unit_ids
     assert unit_2.id in unit_ids

@@ -597,7 +597,7 @@ def test_pagination_filter_datetimes(
     dt = unit_3.created_at.isoformat()  # type: ignore
     query = PaginationQuery(page=1, per_page=-1, query_filter=f'createdAt>="{dt}"')
     unit_results = units_repo.page_all(query).items
-    unit_ids = set(unit.id for unit in unit_results)
+    unit_ids = {unit.id for unit in unit_results}
     assert len(unit_ids) == 1
     assert unit_1.id not in unit_ids
     assert unit_2.id not in unit_ids

@@ -607,7 +607,7 @@ def test_pagination_filter_datetimes(
     dt = future_dt.isoformat()
     query = PaginationQuery(page=1, per_page=-1, query_filter=f'createdAt>="{dt}"')
     unit_results = units_repo.page_all(query).items
-    unit_ids = set(unit.id for unit in unit_results)
+    unit_ids = {unit.id for unit in unit_results}
     assert len(unit_ids) == 0

@@ -931,7 +931,7 @@ def test_pagination_filter_dates(api_client: TestClient, unique_user: TestUser):

     ## Yesterday
     params = {
-        f"page": 1,
+        "page": 1,
         "perPage": -1,
         "queryFilter": f"date >= {yesterday.strftime('%Y-%m-%d')}",
     }

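The string `f"page"` has no placeholders, so the `f` prefix does nothing; dropping it is ruff's F541 (f-string without any placeholders). The same one-character fix repeats in every `params` dict below. Briefly:

    page_key = f"page"  # noqa: F541 (the prefix is inert: no {} placeholders)
    assert page_key == "page"
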
@@ -940,12 +940,12 @@ def test_pagination_filter_dates(api_client: TestClient, unique_user: TestUser):
     response_json = response.json()

     assert len(response_json["items"]) == 2
-    fetched_mealplan_titles = set(mp["title"] for mp in response_json["items"])
+    fetched_mealplan_titles = {mp["title"] for mp in response_json["items"]}
     assert mealplan_today.title in fetched_mealplan_titles
     assert mealplan_tomorrow.title in fetched_mealplan_titles

     params = {
-        f"page": 1,
+        "page": 1,
         "perPage": -1,
         "queryFilter": f"date > {yesterday.strftime('%Y-%m-%d')}",
     }

@@ -954,13 +954,13 @@ def test_pagination_filter_dates(api_client: TestClient, unique_user: TestUser):
     response_json = response.json()

     assert len(response_json["items"]) == 2
-    fetched_mealplan_titles = set(mp["title"] for mp in response_json["items"])
+    fetched_mealplan_titles = {mp["title"] for mp in response_json["items"]}
     assert mealplan_today.title in fetched_mealplan_titles
     assert mealplan_tomorrow.title in fetched_mealplan_titles

     ## Today
     params = {
-        f"page": 1,
+        "page": 1,
         "perPage": -1,
         "queryFilter": f"date >= {today.strftime('%Y-%m-%d')}",
     }

@@ -969,12 +969,12 @@ def test_pagination_filter_dates(api_client: TestClient, unique_user: TestUser):
     response_json = response.json()

     assert len(response_json["items"]) == 2
-    fetched_mealplan_titles = set(mp["title"] for mp in response_json["items"])
+    fetched_mealplan_titles = {mp["title"] for mp in response_json["items"]}
     assert mealplan_today.title in fetched_mealplan_titles
     assert mealplan_tomorrow.title in fetched_mealplan_titles

     params = {
-        f"page": 1,
+        "page": 1,
         "perPage": -1,
         "queryFilter": f"date > {today.strftime('%Y-%m-%d')}",
     }

@@ -983,13 +983,13 @@ def test_pagination_filter_dates(api_client: TestClient, unique_user: TestUser):
     response_json = response.json()

     assert len(response_json["items"]) == 1
-    fetched_mealplan_titles = set(mp["title"] for mp in response_json["items"])
+    fetched_mealplan_titles = {mp["title"] for mp in response_json["items"]}
     assert mealplan_today.title not in fetched_mealplan_titles
     assert mealplan_tomorrow.title in fetched_mealplan_titles

     ## Tomorrow
     params = {
-        f"page": 1,
+        "page": 1,
         "perPage": -1,
         "queryFilter": f"date >= {tomorrow.strftime('%Y-%m-%d')}",
     }

@@ -998,12 +998,12 @@ def test_pagination_filter_dates(api_client: TestClient, unique_user: TestUser):
     response_json = response.json()

     assert len(response_json["items"]) == 1
-    fetched_mealplan_titles = set(mp["title"] for mp in response_json["items"])
+    fetched_mealplan_titles = {mp["title"] for mp in response_json["items"]}
     assert mealplan_today.title not in fetched_mealplan_titles
     assert mealplan_tomorrow.title in fetched_mealplan_titles

     params = {
-        f"page": 1,
+        "page": 1,
         "perPage": -1,
         "queryFilter": f"date > {tomorrow.strftime('%Y-%m-%d')}",
     }

@@ -1015,7 +1015,7 @@ def test_pagination_filter_dates(api_client: TestClient, unique_user: TestUser):

     ## Day After Tomorrow
     params = {
-        f"page": 1,
+        "page": 1,
         "perPage": -1,
         "queryFilter": f"date >= {day_after_tomorrow.strftime('%Y-%m-%d')}",
     }

@@ -1025,7 +1025,7 @@ def test_pagination_filter_dates(api_client: TestClient, unique_user: TestUser):
     assert len(response_json["items"]) == 0

     params = {
-        f"page": 1,
+        "page": 1,
         "perPage": -1,
         "queryFilter": f"date > {day_after_tomorrow.strftime('%Y-%m-%d')}",
     }

@@ -1077,20 +1077,20 @@ def test_pagination_filter_advanced_frontend_sort(database: AllRepositories, uni
         CategorySave(group_id=unique_user.group_id, name=random_string(10)),
         CategorySave(group_id=unique_user.group_id, name=random_string(10)),
     ]
-    category_1, category_2 = [database.categories.create(category) for category in categories]
+    category_1, category_2 = (database.categories.create(category) for category in categories)

     slug1, slug2 = (random_string(10) for _ in range(2))
     tags = [
         TagSave(group_id=unique_user.group_id, name=slug1, slug=slug1),
         TagSave(group_id=unique_user.group_id, name=slug2, slug=slug2),
     ]
-    tag_1, tag_2 = [database.tags.create(tag) for tag in tags]
+    tag_1, tag_2 = (database.tags.create(tag) for tag in tags)

     tools = [
         RecipeToolSave(group_id=unique_user.group_id, name=random_string(10)),
         RecipeToolSave(group_id=unique_user.group_id, name=random_string(10)),
     ]
-    tool_1, tool_2 = [database.tools.create(tool) for tool in tools]
+    tool_1, tool_2 = (database.tools.create(tool) for tool in tools)

     # Bootstrap the database with recipes
     slug = random_string()

@@ -44,7 +44,7 @@ def search_recipes(database: AllRepositories, unique_local_group_id: str, unique
             user_id=unique_local_user_id,
             group_id=unique_local_group_id,
             name="Steinbock Sloop",
-            description=f"My favorite horns are delicious",
+            description="My favorite horns are delicious",
             recipe_ingredient=[
                 RecipeIngredient(note="alpine animal"),
             ],

@@ -302,7 +302,7 @@ def test_recipe_repo_pagination_by_categories(database: AllRepositories, unique_
         order_direction=OrderDirection.asc,
     )
     random_ordered = []
-    for i in range(5):
+    for _ in range(5):
         pagination_query.pagination_seed = str(datetime.now(timezone.utc))
         random_ordered.append(database.recipes.page_all(pagination_query, categories=[category_slug]).items)
     assert not all(i == random_ordered[0] for i in random_ordered)

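Renaming the loop counter `i` to `_` addresses ruff's B007 (unused loop control variable): the body only repeats an action five times and never reads the index. In this hunk the rename also stops the counter from shadowing the `i` bound later in `all(i == random_ordered[0] ...)`. Sketch:

    samples = []
    for _ in range(5):
        # only the repetition matters; the counter itself is unused
        samples.append(len(samples))

    assert samples == [0, 1, 2, 3, 4]
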
@@ -395,7 +395,7 @@ def test_recipe_repo_pagination_by_tags(database: AllRepositories, unique_user:
         order_direction=OrderDirection.asc,
     )
     random_ordered = []
-    for i in range(5):
+    for _ in range(5):
         pagination_query.pagination_seed = str(datetime.now(timezone.utc))
         random_ordered.append(database.recipes.page_all(pagination_query, tags=[tag_slug]).items)
     assert len(random_ordered[0]) == 15

@@ -491,7 +491,7 @@ def test_recipe_repo_pagination_by_tools(database: AllRepositories, unique_user:
         order_direction=OrderDirection.asc,
     )
     random_ordered = []
-    for i in range(5):
+    for _ in range(5):
         pagination_query.pagination_seed = str(datetime.now(timezone.utc))
         random_ordered.append(database.recipes.page_all(pagination_query, tools=[tool_id]).items)
     assert len(random_ordered[0]) == 15

@@ -575,7 +575,7 @@ def test_recipe_repo_pagination_by_foods(database: AllRepositories, unique_user:
         order_direction=OrderDirection.asc,
     )
     random_ordered = []
-    for i in range(5):
+    for _ in range(5):
         pagination_query.pagination_seed = str(datetime.now(timezone.utc))
         random_ordered.append(database.recipes.page_all(pagination_query, foods=[food_id]).items)
     assert len(random_ordered[0]) == 15

@@ -25,7 +25,7 @@ from mealie.services.backups_v2.backup_v2 import BackupV2
 def dict_sorter(d: dict) -> Any:
     possible_keys = {"created_at", "id"}

-    return next((d[key] for key in possible_keys if key in d and d[key]), 1)
+    return next((d[key] for key in possible_keys if d.get(key)), 1)


 # For Future Use

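`d.get(key)` is falsy both when the key is absent and when the stored value is falsy, so it collapses `key in d and d[key]` into one call with identical behavior here. In isolation:

    d = {"created_at": None, "id": 42}
    possible_keys = {"created_at", "id"}

    old = next((d[key] for key in possible_keys if key in d and d[key]), 1)
    new = next((d[key] for key in possible_keys if d.get(key)), 1)

    assert old == new == 42
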
@@ -68,7 +68,7 @@ def test_database_restore():
     new_exporter = AlchemyExporter(settings.DB_URL)
     snapshop_2 = new_exporter.dump()

-    for s1, s2 in zip(snapshop_1, snapshop_2):
+    for s1, s2 in zip(snapshop_1, snapshop_2, strict=False):
         assert snapshop_1[s1].sort(key=dict_sorter) == snapshop_2[s2].sort(key=dict_sorter)

@@ -34,7 +34,7 @@ def test_get_locked_users(database: AllRepositories, user_tuple: list[TestUser])
     elif locked_user.id == user_2.id:
         assert locked_user.locked_at == user_2.locked_at
     else:
-        assert False
+        raise AssertionError()

     # Cleanup
     user_service.unlock_user(user_1)

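`assert False` becomes `raise AssertionError()` per ruff's B011: assertions are stripped entirely under `python -O`, so an unconditional failure must be raised directly to survive optimized runs. For instance:

    def ensure_known(user_id: int, known_ids: set[int]) -> None:
        if user_id in known_ids:
            return
        # unlike `assert False`, this still fires under `python -O`
        raise AssertionError(f"unexpected user id: {user_id}")


    ensure_known(1, {1, 2})  # passes silently
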
@@ -145,7 +145,7 @@ def test_nlp_parser() -> None:
     models: list[CRFIngredient] = convert_list_to_crf_model([x.input for x in test_ingredients])

     # Iterate over models and test_ingredients to gather
-    for model, test_ingredient in zip(models, test_ingredients):
+    for model, test_ingredient in zip(models, test_ingredients, strict=False):
         assert round(float(sum(Fraction(s) for s in model.qty.split())), 3) == pytest.approx(test_ingredient.quantity)

         assert model.comment == test_ingredient.comments

@@ -27,9 +27,9 @@ class LdapConnMock:
         self.name = name

     def simple_bind_s(self, dn, bind_pw):
-        if dn == "cn={}, {}".format(self.user, self.app_settings.LDAP_BASE_DN):
+        if dn == f"cn={self.user}, {self.app_settings.LDAP_BASE_DN}":
             valid_password = self.password
-        elif "cn={}, {}".format(self.query_bind, self.app_settings.LDAP_BASE_DN):
+        elif f"cn={self.query_bind}, {self.app_settings.LDAP_BASE_DN}":
             valid_password = self.query_password

         if bind_pw == valid_password:

@@ -42,7 +42,7 @@ class LdapConnMock:
         if filter == self.app_settings.LDAP_ADMIN_FILTER:
             assert attrlist == []
             assert filter == self.app_settings.LDAP_ADMIN_FILTER
-            assert dn == "cn={}, {}".format(self.user, self.app_settings.LDAP_BASE_DN)
+            assert dn == f"cn={self.user}, {self.app_settings.LDAP_BASE_DN}"
             assert scope == ldap.SCOPE_BASE

             if not self.admin:

@@ -60,11 +60,9 @@ class LdapConnMock:
             mail_attribute=self.app_settings.LDAP_MAIL_ATTRIBUTE,
             input=self.user,
         )
-        search_filter = "(&(|({id_attribute}={input})({mail_attribute}={input})){filter})".format(
-            id_attribute=self.app_settings.LDAP_ID_ATTRIBUTE,
-            mail_attribute=self.app_settings.LDAP_MAIL_ATTRIBUTE,
-            input=self.user,
-            filter=user_filter,
+        search_filter = (
+            f"(&(|({self.app_settings.LDAP_ID_ATTRIBUTE}={self.user})"
+            f"({self.app_settings.LDAP_MAIL_ATTRIBUTE}={self.user})){user_filter})"
         )
         assert filter == search_filter
         assert dn == self.app_settings.LDAP_BASE_DN

@@ -72,7 +70,7 @@ class LdapConnMock:

         return [
             (
-                "cn={}, {}".format(self.user, self.app_settings.LDAP_BASE_DN),
+                f"cn={self.user}, {self.app_settings.LDAP_BASE_DN}",
                 {
                     self.app_settings.LDAP_ID_ATTRIBUTE: [self.user.encode()],
                     self.app_settings.LDAP_NAME_ATTRIBUTE: [self.name.encode()],

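The `str.format` to f-string rewrites in this mock follow ruff's UP032: f-strings inline the values at the point of use, which is shorter and avoids placeholder/argument mismatches. For example:

    user = "professor"
    base_dn = "dc=planetexpress,dc=com"

    dn_old = "cn={}, {}".format(user, base_dn)  # noqa: UP032
    dn_new = f"cn={user}, {base_dn}"

    assert dn_old == dn_new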