Compare commits

..

2 Commits

Author SHA1 Message Date
Hayden
23cb4bdf36 fix: use unique temp filename for migration archive uploads 2026-05-13 17:57:55 -05:00
Hayden
4039ff6655 fix: support CSV/TXT upload and add validation for Plan to Eat import (#6360)
Plan to Eat exports CSV or TXT files directly, but the importer only accepted
ZIP archives. This caused a silent failure when users uploaded CSV files.

- Extend plantoeat_recipes() to detect ZIP vs CSV/TXT by magic bytes and
  process raw CSV/TXT files directly without requiring a ZIP wrapper
- Add _validate_archive() to return a clear error report entry when the
  uploaded file is neither a ZIP nor valid UTF-8 text
- Update frontend file input to accept .zip, .csv, and .txt
- Update i18n description to mention all accepted formats
- Add plantoeat.csv test fixture and integration tests for CSV import
  and invalid file type rejection
2026-05-13 17:24:08 -05:00
9 changed files with 97 additions and 23 deletions

View File

@@ -427,7 +427,7 @@
"mealie-text": "Mealie can import recipes from the Mealie application from a pre v1.0 release. Export your recipes from your old instance, and upload the zip file below. Note that only recipes can be imported from the export.", "mealie-text": "Mealie can import recipes from the Mealie application from a pre v1.0 release. Export your recipes from your old instance, and upload the zip file below. Note that only recipes can be imported from the export.",
"plantoeat": { "plantoeat": {
"title": "Plan to Eat", "title": "Plan to Eat",
"description-long": "Mealie can import recipies from Plan to Eat." "description-long": "Mealie can import recipes from Plan to Eat. Upload a ZIP archive, CSV, or TXT file exported from Plan to Eat."
}, },
"myrecipebox": { "myrecipebox": {
"title": "My Recipe Box", "title": "My Recipe Box",

View File

@@ -337,16 +337,8 @@ const _content: Record<string, MigrationContent> = {
}, },
[MIGRATIONS.plantoeat]: { [MIGRATIONS.plantoeat]: {
text: i18n.t("migration.plantoeat.description-long"), text: i18n.t("migration.plantoeat.description-long"),
acceptedFileType: ".zip", acceptedFileType: ".zip,.csv,.txt",
tree: [ tree: false,
{
icon: $globals.icons.zip,
title: "plantoeat-recipes-508318_10-13-2023.zip",
children: [
{ title: "plantoeat-recipes-508318_10-13-2023.csv", icon: $globals.icons.codeJson },
],
},
],
}, },
[MIGRATIONS.recipekeeper]: { [MIGRATIONS.recipekeeper]: {
text: i18n.t("migration.recipekeeper.description-long"), text: i18n.t("migration.recipekeeper.description-long"),

View File

@@ -179,7 +179,7 @@ def validate_file_token(token: str | None = None) -> Path:
@contextmanager @contextmanager
def get_temporary_zip_path(auto_unlink=True) -> Generator[Path, None, None]: def get_temporary_zip_path(auto_unlink=True) -> Generator[Path, None, None]:
app_dirs.TEMP_DIR.mkdir(exist_ok=True, parents=True) app_dirs.TEMP_DIR.mkdir(exist_ok=True, parents=True)
temp_path = app_dirs.TEMP_DIR.joinpath("my_zip_archive.zip") temp_path = app_dirs.TEMP_DIR / f"{uuid4().hex}.zip"
try: try:
yield temp_path yield temp_path
finally: finally:

View File

@@ -35,7 +35,7 @@ class OpenIDProvider(AuthProvider[UserInfo]):
self._logger.debug("[OIDC] %s: %s", key, value) self._logger.debug("[OIDC] %s: %s", key, value)
if not self.required_claims.issubset(claims.keys()): if not self.required_claims.issubset(claims.keys()):
self._logger.debug( self._logger.error(
"[OIDC] Required claims not present. Expected: %s Actual: %s", "[OIDC] Required claims not present. Expected: %s Actual: %s",
self.required_claims, self.required_claims,
claims.keys(), claims.keys(),
@@ -45,7 +45,7 @@ class OpenIDProvider(AuthProvider[UserInfo]):
# Check for empty required claims # Check for empty required claims
for claim in self.required_claims: for claim in self.required_claims:
if not claims.get(claim): if not claims.get(claim):
self._logger.debug("[OIDC] Required claim '%s' is empty", claim) self._logger.error("[OIDC] Required claim '%s' is empty", claim)
raise MissingClaimException() raise MissingClaimException()
repos = get_repositories(self.session, group_id=None, household_id=None) repos = get_repositories(self.session, group_id=None, household_id=None)

View File

@@ -134,7 +134,6 @@ async def oauth_callback(request: Request, session: Session = Depends(generate_s
auth_provider = OpenIDProvider(session, userinfo, use_default_groups=True) auth_provider = OpenIDProvider(session, userinfo, use_default_groups=True)
auth = auth_provider.authenticate() auth = auth_provider.authenticate()
except MissingClaimException: except MissingClaimException:
logger.error("[OIDC] Required claims not present in ID token or userinfo endpoint")
auth = None auth = None
if not auth: if not auth:

View File

@@ -7,6 +7,7 @@ from pathlib import Path
from slugify import slugify from slugify import slugify
from mealie.pkgs.cache import cache_key from mealie.pkgs.cache import cache_key
from mealie.schema.reports.reports import ReportEntryCreate
from mealie.services.scraper import cleaner from mealie.services.scraper import cleaner
from ._migration_base import BaseMigrator from ._migration_base import BaseMigrator
@@ -15,7 +16,11 @@ from .utils.migration_helpers import scrape_image, split_by_comma
def plantoeat_recipes(file: Path): def plantoeat_recipes(file: Path):
"""Yields all recipes inside the export file as dict""" """Yields all recipes inside the export file as dict.
Accepts a ZIP archive containing a CSV, or a raw CSV/TXT file.
"""
if zipfile.is_zipfile(file):
with tempfile.TemporaryDirectory() as tmpdir: with tempfile.TemporaryDirectory() as tmpdir:
with zipfile.ZipFile(file) as zip_file: with zipfile.ZipFile(file) as zip_file:
zip_file.extractall(tmpdir) zip_file.extractall(tmpdir)
@@ -24,6 +29,10 @@ def plantoeat_recipes(file: Path):
with open(name, newline="") as csvfile: with open(name, newline="") as csvfile:
reader = csv.DictReader(csvfile) reader = csv.DictReader(csvfile)
yield from reader yield from reader
else:
with open(file, newline="", encoding="utf-8", errors="ignore") as csvfile:
reader = csv.DictReader(csvfile)
yield from reader
def get_value_as_string_or_none(dictionary: dict, key: str): def get_value_as_string_or_none(dictionary: dict, key: str):
@@ -112,7 +121,32 @@ class PlanToEatMigrator(BaseMigrator):
return recipe_dict return recipe_dict
def _validate_archive(self) -> bool:
    """Validate that the uploaded file is a supported Plan to Eat export.

    Accepts either a ZIP archive or a file whose leading bytes decode as
    UTF-8 text (CSV/TXT). On an unsupported format, appends a failure
    entry to the migration report and returns False.
    """
    if zipfile.is_zipfile(self.archive):
        return True

    # Not a ZIP: probe the first 512 characters with strict decoding so
    # arbitrary binary uploads (e.g. PDFs) are rejected rather than parsed.
    try:
        with open(self.archive, encoding="utf-8", errors="strict") as f:
            f.read(512)
    except UnicodeDecodeError:
        # Neither ZIP nor readable text — surface a clear, actionable error.
        self.report_entries.append(
            ReportEntryCreate(
                report_id=self.report_id,
                success=False,
                message="Unsupported file format. Please upload a ZIP archive, CSV file, or TXT file.",
                exception="",
            )
        )
        return False
    return True
def _migrate(self) -> None: def _migrate(self) -> None:
if not self._validate_archive():
return
recipe_image_urls = {} recipe_image_urls = {}
recipes = [] recipes = []

View File

@@ -45,6 +45,8 @@ migrations_tandoor = CWD / "migrations/tandoor.zip"
migrations_plantoeat = CWD / "migrations/plantoeat.zip" migrations_plantoeat = CWD / "migrations/plantoeat.zip"
migrations_plantoeat_csv = CWD / "migrations/plantoeat.csv"
migrations_myrecipebox = CWD / "migrations/myrecipebox.csv" migrations_myrecipebox = CWD / "migrations/myrecipebox.csv"
migrations_recipekeeper = CWD / "migrations/recipekeeper.zip" migrations_recipekeeper = CWD / "migrations/recipekeeper.zip"

View File

@@ -0,0 +1,13 @@
Title,Course,Cuisine,Main Ingredient,Description,Source,Url,Url Host,Prep Time,Cook Time,Total Time,Servings,Yield,Ingredients,Directions,Tags,Rating,Public Url,Photo Url,Private,Nutritional Score (generic),Calories,Fat,Saturated Fat,Cholesterol,Sodium,Sugar,Carbohydrate,Fiber,Protein,Cost,Created At,Updated At
Test Recipe,Main Course,American,Beans,"This is a description.
Here is new line.",Manually entered source,https://eatwithclarity.com/sushi-bowl-with-sesame-tofu/,,75,75,150,7,1 loaf,", Heading
2 itm Test, note
, Heading2
3 pkg Two, note2
","Directions.
Will go here.","Allergen-Friendly, Cheap, Test",3,https://app.plantoeat.com/recipes/38843883,https://plantoeat.s3.amazonaws.com/recipes/29516709/470292506c8d9b71582487a7879ab7b197d06490-large.jpg?1628205591,yes,,13,16,17,18,19,22,20,21,23,,2023-10-13 20:29:29,2023-10-13 20:32:48
Test Recipe2,,,,,,,,,,,,,"2 itm Test, note
3 pkg Two, note2
","Directions.
Will go here.",,,,,,,,,,,,,,,,,2023-10-13 20:29:29,2023-10-13 20:32:48
1 Title Course Cuisine Main Ingredient Description Source Url Url Host Prep Time Cook Time Total Time Servings Yield Ingredients Directions Tags Rating Public Url Photo Url Private Nutritional Score (generic) Calories Fat Saturated Fat Cholesterol Sodium Sugar Carbohydrate Fiber Protein Cost Created At Updated At
2 Test Recipe Main Course American Beans This is a description. Here is new line. Manually entered source https://eatwithclarity.com/sushi-bowl-with-sesame-tofu/ 75 75 150 7 1 loaf , Heading 2 itm Test, note , Heading2 3 pkg Two, note2 Directions. Will go here. Allergen-Friendly, Cheap, Test 3 https://app.plantoeat.com/recipes/38843883 https://plantoeat.s3.amazonaws.com/recipes/29516709/470292506c8d9b71582487a7879ab7b197d06490-large.jpg?1628205591 yes 13 16 17 18 19 22 20 21 23 2023-10-13 20:29:29 2023-10-13 20:32:48
3 Test Recipe2 2 itm Test, note 3 pkg Two, note2 Directions. Will go here. 2023-10-13 20:29:29 2023-10-13 20:32:48

View File

@@ -94,6 +94,15 @@ test_cases = [
"transFatContent", "transFatContent",
}, },
), ),
MigrationTestData(
typ=SupportedMigrations.plantoeat,
archive=test_data.migrations_plantoeat_csv,
search_slug="test-recipe",
nutrition_filter={
"unsaturatedFatContent",
"transFatContent",
},
),
MigrationTestData( MigrationTestData(
typ=SupportedMigrations.myrecipebox, typ=SupportedMigrations.myrecipebox,
archive=test_data.migrations_myrecipebox, archive=test_data.migrations_myrecipebox,
@@ -124,6 +133,7 @@ test_ids = [
"mealie_alpha_archive", "mealie_alpha_archive",
"tandoor_archive", "tandoor_archive",
"plantoeat_archive", "plantoeat_archive",
"plantoeat_csv",
"myrecipebox_csv", "myrecipebox_csv",
"recipekeeper_archive", "recipekeeper_archive",
"cookn_archive", "cookn_archive",
@@ -190,6 +200,30 @@ def test_recipe_migration(api_client: TestClient, unique_user_fn_scoped: TestUse
# TODO: validate other types of content # TODO: validate other types of content
def test_plantoeat_rejects_invalid_file_type(api_client: TestClient, unique_user: TestUser) -> None:
    """An upload that is neither ZIP nor UTF-8 text must produce a failed report entry."""
    # Bytes 0-255 repeated: guaranteed not valid UTF-8 and not a ZIP (e.g. a PDF-like blob).
    junk = bytes(range(256)) * 4

    upload = api_client.post(
        api_routes.groups_migrations,
        data={"migration_type": SupportedMigrations.plantoeat.value},
        files={"archive": junk},
        headers=unique_user.token,
    )
    assert upload.status_code == 200
    report_id = upload.json()["id"]

    report_response = api_client.get(api_routes.groups_reports_item_id(report_id), headers=unique_user.token)
    assert report_response.status_code == 200

    entries = report_response.json()["entries"]
    assert entries
    first_entry = entries[0]
    assert not first_entry["success"]
    assert "ZIP" in first_entry["message"] or "CSV" in first_entry["message"]
def test_bad_mealie_alpha_data_is_ignored(api_client: TestClient, unique_user: TestUser): def test_bad_mealie_alpha_data_is_ignored(api_client: TestClient, unique_user: TestUser):
with TemporaryDirectory() as tmpdir: with TemporaryDirectory() as tmpdir:
with ZipFile(test_data.migrations_mealie) as zf: with ZipFile(test_data.migrations_mealie) as zf: