mirror of
https://github.com/mealie-recipes/mealie.git
synced 2026-04-16 18:05:35 -04:00
feat: Added Option to Import Recipe Category During Recipe Import (#6523)
Co-authored-by: Michael Genson <genson.michael@gmail.com>
Co-authored-by: Michael Genson <71845777+michael-genson@users.noreply.github.com>
This commit is contained in:
@@ -521,8 +521,8 @@ def clean_categories(category: str | list) -> list[str]:
|
||||
case str(category):
|
||||
if not category.strip():
|
||||
return []
|
||||
|
||||
return [category]
|
||||
# Split comma-separated categories
|
||||
return [cat.strip().title() for cat in category.split(",") if cat.strip()]
|
||||
case [str(), *_]:
|
||||
return [cat.strip().title() for cat in category if cat.strip()]
|
||||
case [{"name": str(), "slug": str()}, *_]:
|
||||
|
||||
@@ -4,7 +4,7 @@ from slugify import slugify
|
||||
|
||||
from mealie.repos.repository_factory import AllRepositories
|
||||
from mealie.schema.recipe import TagOut
|
||||
from mealie.schema.recipe.recipe_category import TagSave
|
||||
from mealie.schema.recipe.recipe_category import CategorySave, TagSave
|
||||
|
||||
|
||||
class NoContextException(Exception):
|
||||
@@ -19,10 +19,14 @@ class ScraperContext:
|
||||
class ScrapedExtras:
|
||||
def __init__(self) -> None:
    """Start with empty buffers for scraped tag and category names.

    Names are only buffered here; they are written to the database later,
    when use_tags() / use_categories() is called with a scraper context.
    """
    self._tags: list[str] = []
    self._categories: list[str] = []
|
||||
|
||||
def set_tags(self, tags: list[str]) -> None:
    """Buffer scraped tag names for later persistence via use_tags().

    Stores the caller's list reference as-is (no defensive copy), replacing
    any previously buffered tags.
    """
    self._tags = tags
|
||||
|
||||
def set_categories(self, categories: list[str]) -> None:
    """Buffer scraped category names for later persistence via use_categories().

    Stores the caller's list reference as-is (no defensive copy), replacing
    any previously buffered categories.
    """
    self._categories = categories
|
||||
|
||||
def use_tags(self, ctx: ScraperContext) -> list[TagOut]:
|
||||
if not self._tags:
|
||||
return []
|
||||
@@ -49,3 +53,30 @@ class ScrapedExtras:
|
||||
tags.append(db_tag)
|
||||
|
||||
return tags
|
||||
|
||||
def use_categories(self, ctx: ScraperContext) -> list[TagOut]:
    """Persist the buffered category names and return their database records.

    Each unique category (deduplicated by slug) is either looked up in the
    context's category repository or, if absent, created in the context's
    group. Records are returned in first-seen order.

    NOTE(review): the return annotation says ``list[TagOut]``, but every
    element comes from ``ctx.repos.categories`` — presumably this should be
    the category schema type rather than ``TagOut``; confirm against the
    repository's actual return type.
    """
    # Nothing was scraped -> nothing to look up or create.
    if not self._categories:
        return []

    repo = ctx.repos.categories

    categories = []
    # Dedupe by slug so variants of the same name (e.g. "Dinner" vs
    # "dinner") are only processed once.
    seen_category_slugs: set[str] = set()
    for category in self._categories:
        slugify_category = slugify(category)
        if slugify_category in seen_category_slugs:
            continue

        seen_category_slugs.add(slugify_category)

        # Check if category exists
        if db_category := repo.get_one(slugify_category, "slug"):
            categories.append(db_category)
            continue

        # Not found: create the category in the current group and use
        # the freshly created record.
        save_data = CategorySave(name=category, group_id=ctx.repos.group_id)
        db_category = repo.create(save_data)

        categories.append(db_category)

    return categories
|
||||
|
||||
@@ -194,6 +194,7 @@ class RecipeScraperPackage(ABCScraperStrategy):
|
||||
extras = ScrapedExtras()
|
||||
|
||||
extras.set_tags(try_get_default(scraped_data.keywords, "keywords", "", cleaner.clean_tags))
|
||||
extras.set_categories(try_get_default(scraped_data.category, "recipeCategory", "", cleaner.clean_categories))
|
||||
|
||||
recipe = Recipe(
|
||||
name=try_get_default(scraped_data.title, "name", "No Name Found", cleaner.clean_string),
|
||||
|
||||
Reference in New Issue
Block a user