feat: Added Option to Import Recipe Category During Recipe Import (#6523)

Co-authored-by: Michael Genson <genson.michael@gmail.com>
Co-authored-by: Michael Genson <71845777+michael-genson@users.noreply.github.com>
This commit is contained in:
Gtt1229
2026-01-30 12:18:15 -05:00
committed by GitHub
parent e3e45c534e
commit e83891e3ca
11 changed files with 86 additions and 13 deletions

View File

@@ -165,6 +165,11 @@ class RecipeController(BaseRecipeController):
recipe.tags = extras.use_tags(ctx) # type: ignore
if req.include_categories:
ctx = ScraperContext(self.repos)
recipe.recipe_category = extras.use_categories(ctx) # type: ignore
new_recipe = self.service.create_one(recipe)
if new_recipe:

View File

@@ -10,6 +10,7 @@ class ScrapeRecipeTest(MealieModel):
class ScrapeRecipeBase(MealieModel):
include_tags: bool = False
include_categories: bool = False
class ScrapeRecipe(ScrapeRecipeBase):
@@ -19,6 +20,7 @@ class ScrapeRecipe(ScrapeRecipeBase):
"example": {
"url": "https://myfavoriterecipes.com/recipes",
"includeTags": True,
"includeCategories": True,
},
}
)

View File

@@ -521,8 +521,8 @@ def clean_categories(category: str | list) -> list[str]:
case str(category):
if not category.strip():
return []
return [category]
# Split comma-separated categories
return [cat.strip().title() for cat in category.split(",") if cat.strip()]
case [str(), *_]:
return [cat.strip().title() for cat in category if cat.strip()]
case [{"name": str(), "slug": str()}, *_]:

View File

@@ -4,7 +4,7 @@ from slugify import slugify
from mealie.repos.repository_factory import AllRepositories
from mealie.schema.recipe import TagOut
from mealie.schema.recipe.recipe_category import TagSave
from mealie.schema.recipe.recipe_category import CategoryOut, CategorySave, TagSave
class NoContextException(Exception):
@@ -19,10 +19,14 @@ class ScraperContext:
class ScrapedExtras:
def __init__(self) -> None:
    # Raw tag/category names harvested during scraping.  They are plain
    # strings here; use_tags()/use_categories() resolve them to database
    # records once a ScraperContext is available.
    self._tags: list[str] = []
    self._categories: list[str] = []
def set_tags(self, tags: list[str]) -> None:
    """Record the scraped tag names for later resolution via ``use_tags``."""
    self._tags = tags
def set_categories(self, categories: list[str]) -> None:
    """Record the scraped category names for later resolution via ``use_categories``."""
    self._categories = categories
def use_tags(self, ctx: ScraperContext) -> list[TagOut]:
if not self._tags:
return []
@@ -49,3 +53,30 @@ class ScrapedExtras:
tags.append(db_tag)
return tags
def use_categories(self, ctx: ScraperContext) -> list[CategoryOut]:
    """Resolve the scraped category names into stored category records.

    Mirrors ``use_tags``: deduplicates the scraped names by slug, reuses
    categories that already exist in the group, and creates any that do not.

    Args:
        ctx: Scraper context providing repository access for the group.

    Returns:
        The resolved/created category records; empty if nothing was scraped.
    """
    if not self._categories:
        return []

    repo = ctx.repos.categories

    categories = []
    seen_category_slugs: set[str] = set()
    for category in self._categories:
        slugify_category = slugify(category)

        # Scraped data may repeat a category under different spellings or
        # casings; the slug is the canonical identity, so skip duplicates.
        if slugify_category in seen_category_slugs:
            continue
        seen_category_slugs.add(slugify_category)

        # Reuse an existing category rather than violating the unique slug.
        if db_category := repo.get_one(slugify_category, "slug"):
            categories.append(db_category)
            continue

        save_data = CategorySave(name=category, group_id=ctx.repos.group_id)
        db_category = repo.create(save_data)
        categories.append(db_category)

    return categories

View File

@@ -194,6 +194,7 @@ class RecipeScraperPackage(ABCScraperStrategy):
extras = ScrapedExtras()
extras.set_tags(try_get_default(scraped_data.keywords, "keywords", "", cleaner.clean_tags))
extras.set_categories(try_get_default(scraped_data.category, "recipeCategory", "", cleaner.clean_categories))
recipe = Recipe(
name=try_get_default(scraped_data.title, "name", "No Name Found", cleaner.clean_string),