Release v0.1.0 Candidate (#85)
* Changed uvicorn port to 80 * Changed port in docker-compose to match dockerfile * Readded environment variables in docker-compose * production image rework * Use opengraph metadata to make basic recipe cards when full recipe metadata is not available * fixed instrucitons on parse * add last_recipe * automated testing * roadmap update * Sqlite (#75) * file structure * auto-test * take 2 * refactor ap scheduler and startup process * fixed scraper error * database abstraction * database abstraction * port recipes over to new schema * meal migration * start settings migration * finale mongo port * backup improvements * migration imports to new DB structure * unused import cleanup * docs strings * settings and theme import logic * cleanup * fixed tinydb error * requirements * fuzzy search * remove scratch file * sqlalchemy models * improved search ui * recipe models almost done * sql modal population * del scratch * rewrite database model mixins * mostly grabage * recipe updates * working sqllite * remove old files and reorganize * final cleanup Co-authored-by: Hayden <hay-kot@pm.me> * Backup card (#78) * backup / import dialog * upgrade to new tag method * New import card * rename settings.py to app_config.py * migrate to poetry for development * fix failing test Co-authored-by: Hayden <hay-kot@pm.me> * added mkdocs to docker-compose * Translations (#72) * Translations + danish * changed back proxy target to use ENV * Resolved more merge conflicts * Removed test in translation * Documentation of translations * Updated translations * removed old packages Co-authored-by: Hayden <64056131+hay-kot@users.noreply.github.com> * fail to start bug fixes * feature: prep/cook/total time slots (#80) Co-authored-by: Hayden <hay-kot@pm.me> * missing bind attributes * Bug fixes (#81) * fix: url remains after succesful import * docs: changelog + update todos * arm image * arm compose * compose updates * update poetry * arm support Co-authored-by: Hayden <hay-kot@pm.me> * dockerfile 
hotfix * dockerfile hotfix * Version Release Final Touches (#84) * Remove slim * bug: opacity issues * bug: startup failure with no database * ci/cd on dev branch * formatting * v0.1.0 documentation Co-authored-by: Hayden <hay-kot@pm.me> * db init hotfix * bug: fix crash in mongo * fix mongo bug * fixed version notifier * finale changelog Co-authored-by: kentora <=> Co-authored-by: Hayden <hay-kot@pm.me> Co-authored-by: Richard Mitic <richard.h.mitic@gmail.com> Co-authored-by: kentora <kentora@kentora.dk>
@@ -1,10 +1,9 @@
|
||||
from pathlib import Path
|
||||
|
||||
import uvicorn
|
||||
from fastapi import FastAPI
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
|
||||
import startup
|
||||
# import utils.startup as startup
|
||||
from app_config import PORT, PRODUCTION, WEB_PATH, docs_url, redoc_url
|
||||
from routes import (
|
||||
backup_routes,
|
||||
meal_routes,
|
||||
@@ -14,13 +13,9 @@ from routes import (
|
||||
static_routes,
|
||||
user_routes,
|
||||
)
|
||||
from routes.setting_routes import scheduler # ! This has to be imported for scheduling
|
||||
from settings import PORT, PRODUCTION, docs_url, redoc_url
|
||||
from utils.api_docs import generate_api_docs
|
||||
from utils.logger import logger
|
||||
|
||||
CWD = Path(__file__).parent
|
||||
WEB_PATH = CWD.joinpath("dist")
|
||||
|
||||
app = FastAPI(
|
||||
title="Mealie",
|
||||
description="A place for all your recipes",
|
||||
@@ -29,18 +24,25 @@ app = FastAPI(
|
||||
redoc_url=redoc_url,
|
||||
)
|
||||
|
||||
# Mount Vue Frontend only in production
|
||||
if PRODUCTION:
|
||||
|
||||
def mount_static_files():
    # Serve the built frontend from WEB_PATH under /static.
    # html=True makes StaticFiles serve index.html for directory requests,
    # which is what the single-page frontend needs.
    app.mount("/static", StaticFiles(directory=WEB_PATH, html=True))
|
||||
|
||||
|
||||
# API Routes
|
||||
app.include_router(recipe_routes.router)
|
||||
app.include_router(meal_routes.router)
|
||||
app.include_router(setting_routes.router)
|
||||
app.include_router(backup_routes.router)
|
||||
app.include_router(user_routes.router)
|
||||
app.include_router(migration_routes.router)
|
||||
def api_routers():
    # Register every API router on the app. Called once at startup; routers
    # are matched in registration order, so recipes are registered first.
    # First
    app.include_router(recipe_routes.router)
    app.include_router(meal_routes.router)
    app.include_router(setting_routes.router)
    app.include_router(backup_routes.router)
    app.include_router(user_routes.router)
    app.include_router(migration_routes.router)
|
||||
|
||||
|
||||
if PRODUCTION:
|
||||
mount_static_files()
|
||||
|
||||
api_routers()
|
||||
|
||||
# API 404 Catch all CALL AFTER ROUTERS
|
||||
@app.get("/api/{full_path:path}", status_code=404, include_in_schema=False)
|
||||
@@ -50,12 +52,10 @@ def invalid_api():
|
||||
|
||||
app.include_router(static_routes.router)
|
||||
|
||||
startup.ensure_dirs()
|
||||
startup.generate_default_theme()
|
||||
|
||||
# Generate API Documentation
|
||||
if not PRODUCTION:
|
||||
startup.generate_api_docs(app)
|
||||
generate_api_docs(app)
|
||||
|
||||
if __name__ == "__main__":
|
||||
logger.info("-----SYSTEM STARTUP-----")
|
||||
|
||||
@@ -3,20 +3,40 @@ from pathlib import Path
|
||||
|
||||
import dotenv
|
||||
|
||||
# Helpful Globals
|
||||
CWD = Path(__file__).parent
|
||||
|
||||
|
||||
def ensure_dirs():
|
||||
for dir in REQUIRED_DIRS:
|
||||
dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
# Register ENV
|
||||
ENV = CWD.joinpath(".env")
|
||||
dotenv.load_dotenv(ENV)
|
||||
|
||||
# Helpful Globals
|
||||
BASE_DIR = CWD
|
||||
DATA_DIR = CWD.joinpath("data")
|
||||
WEB_PATH = CWD.joinpath("dist")
|
||||
IMG_DIR = DATA_DIR.joinpath("img")
|
||||
BACKUP_DIR = DATA_DIR.joinpath("backups")
|
||||
DEBUG_DIR = DATA_DIR.joinpath("debug")
|
||||
MIGRATION_DIR = DATA_DIR.joinpath("migration")
|
||||
TEMPLATE_DIR = DATA_DIR.joinpath("templates")
|
||||
TEMP_DIR = DATA_DIR.joinpath("temp")
|
||||
SQLITE_DIR = DATA_DIR.joinpath("db")
|
||||
TEMP_DIR = DATA_DIR.joinpath(".temp")
|
||||
|
||||
REQUIRED_DIRS = [
|
||||
DATA_DIR,
|
||||
IMG_DIR,
|
||||
BACKUP_DIR,
|
||||
DEBUG_DIR,
|
||||
MIGRATION_DIR,
|
||||
TEMPLATE_DIR,
|
||||
SQLITE_DIR,
|
||||
]
|
||||
|
||||
# Env Variables
|
||||
ENV = CWD.joinpath(".env")
|
||||
dotenv.load_dotenv(ENV)
|
||||
|
||||
# General
|
||||
PRODUCTION = os.environ.get("ENV")
|
||||
@@ -30,6 +50,24 @@ else:
|
||||
docs_url = None
|
||||
redoc_url = None
|
||||
|
||||
SQLITE_FILE = None
# DATABASE ENV
# db_type selects the storage backend; anything other than the two supported
# values below is a configuration error and aborts startup.
DATABASE_TYPE = os.getenv("db_type", "sqlite")  # mongo, sqlite
if DATABASE_TYPE == "sqlite":
    USE_SQL = True
    USE_MONGO = False
    SQLITE_FILE = SQLITE_DIR.joinpath("mealie.sqlite")


elif DATABASE_TYPE == "mongo":
    USE_MONGO = True
    USE_SQL = False

else:
    # bug fix: the message previously suggested 'tinydb', which is not an
    # option this code accepts — the branches above check 'sqlite' and 'mongo'.
    raise Exception(
        "Unable to determine database type. Acceptable options are 'mongo' or 'sqlite' "
    )
|
||||
|
||||
# Mongo Database
|
||||
MEALIE_DB_NAME = os.getenv("mealie_db_name", "mealie")
|
||||
DB_USERNAME = os.getenv("db_username", "root")
|
||||
@@ -37,6 +75,9 @@ DB_PASSWORD = os.getenv("db_password", "example")
|
||||
DB_HOST = os.getenv("db_host", "mongo")
|
||||
DB_PORT = os.getenv("db_port", 27017)
|
||||
|
||||
# SFTP Email Stuff
|
||||
# SFTP Email Stuff - For use Later down the line!
|
||||
SFTP_USERNAME = os.getenv("sftp_username", None)
|
||||
SFTP_PASSWORD = os.getenv("sftp_password", None)
|
||||
|
||||
|
||||
ensure_dirs()
|
||||
@@ -1,91 +1,104 @@
|
||||
{
|
||||
"@context": "http://schema.org/",
|
||||
"@context": "http://schema.org",
|
||||
"@type": "Recipe",
|
||||
"name": "Jalape\u00f1o Popper Dip",
|
||||
"author": {
|
||||
"@type": "Person",
|
||||
"name": "Michelle"
|
||||
},
|
||||
"description": "Jalapeno Popper Dip is creamy, cheesy and has just the perfect amount of kick. Great appetizer for your next party or watching the big game!",
|
||||
"datePublished": "2016-02-22 00:01:37+00:00",
|
||||
"image": "jalapeno-popper-dip.jpg",
|
||||
"recipeYield": [
|
||||
"10",
|
||||
"10 to 12 servings"
|
||||
"articleBody": "\u201cMy great-grandmothers were Indigenous and mostly nomadic, which means lots of fungi foraging,\u201d says Maricela Vega, the chef at Atlanta restaurant 8ARM and founder of Chicomec\u00f3atl, an organization centering the foodways of Indigenous Mexican diaspora. \u201cWhen I serve lion\u2019s mane mushrooms to vegans they sometimes mistake them for chicken, but they\u2019re more affordable, better for the planet, and help strengthen your immune system! They grow wild during Georgia winters, but at-home cultivation kits are easy for those without forest access. I use pumpkin hot sauce, oil, and fresh herbs as a marinade, then bust out a baby grill or cast-iron skillet to get them nice and crispy.\u201d This is a multistep recipe but perfect for long winter days when you want to flood the house with toasty, irresistible aromas. When you chop the mixed herbs, save the stems and throw them into the blanching water for the vegetables to add flavor.",
|
||||
"alternativeHeadline": "This dish is perfect for long winter days when you want to flood the house with toasty, irresistible aromas.",
|
||||
"dateModified": "2021-01-11 18:32:43.962000",
|
||||
"datePublished": "2021-01-12 04:00:00",
|
||||
"keywords": [
|
||||
"recipes",
|
||||
"healthyish",
|
||||
"vegan",
|
||||
"herb",
|
||||
"apple cider vinegar",
|
||||
"kosher salt",
|
||||
"carrot",
|
||||
"potato",
|
||||
"radicchio",
|
||||
"mushroom",
|
||||
"oyster mushrooms",
|
||||
"oil",
|
||||
"black pepper",
|
||||
"lemon",
|
||||
"web"
|
||||
],
|
||||
"prepTime": "0:15:00",
|
||||
"cookTime": "0:30:00",
|
||||
"totalTime": "0:45:00",
|
||||
"thumbnailUrl": "https://assets.bonappetit.com/photos/5ffc74b39cbb0a3c54d7400f/1:1/w_1199,h_1199,c_limit/HLY-Maricela-Vega-Grilled%20Mushrooms%20and%20Root%20Vegetables.jpg",
|
||||
"publisher": {
|
||||
"@context": "https://schema.org",
|
||||
"@type": "Organization",
|
||||
"name": "Bon App\u00e9tit",
|
||||
"logo": {
|
||||
"@type": "ImageObject",
|
||||
"url": "https://www.bonappetit.com/verso/static/bon-appetit/assets/logo-seo.328de564b950e3d5d1fbe3e42f065290ca1d3844.png",
|
||||
"width": "479px",
|
||||
"height": "100px"
|
||||
},
|
||||
"url": "https://www.bonappetit.com"
|
||||
},
|
||||
"isPartOf": {
|
||||
"@type": [
|
||||
"CreativeWork",
|
||||
"Product"
|
||||
],
|
||||
"name": "Bon App\u00e9tit"
|
||||
},
|
||||
"isAccessibleForFree": true,
|
||||
"author": [
|
||||
{
|
||||
"@type": "Person",
|
||||
"name": "Maricela Vega",
|
||||
"sameAs": "https://bon-appetit.com/contributor/maricela-vega/"
|
||||
}
|
||||
],
|
||||
"description": "This dish is perfect for long winter days when you want to flood the house with toasty, irresistible aromas.",
|
||||
"image": "grilled-mushrooms-and-root-vegetables.jpg",
|
||||
"headline": "Grilled Mushrooms and Root Vegetables",
|
||||
"name": "Grilled Mushrooms and Root Vegetables",
|
||||
"recipeIngredient": [
|
||||
"16 ounces cream cheese (at room temperature)",
|
||||
"1 cup mayonnaise",
|
||||
"8 pieces of bacon (cooked and chopped)",
|
||||
"6 jalape\u00f1os (seeded and minced (if you can't get fresh, substitute a 4-ounce can diced jalape\u00f1o peppers, drained))",
|
||||
"2 cloves garlic (minced)",
|
||||
"\u00bd teaspoon cumin",
|
||||
"6 ounces cheddar cheese (shredded (about 1\u00bd cups))",
|
||||
"1 cup panko breadcrumbs",
|
||||
"1 cup grated Parmesan cheese",
|
||||
"4 tablespoons unsalted butter, melted"
|
||||
"1 cup Sesame Cr\u00e8me",
|
||||
"2 Tbsp. Allium Confit",
|
||||
"2 Tbsp. finely chopped mixed tender herbs (parsley, oregano, and/or mint), stems reservedfor blanching vegetables",
|
||||
"2 Tbsp. apple cider vinegar",
|
||||
"Kosher salt, freshly ground pepper",
|
||||
"1 cup finely chopped mixed herbs (parsley, oregano, and/or mint), stems reserved for blanching vegetables",
|
||||
"Kosher salt",
|
||||
"4 medium carrots (about 1 lb.), preferably rainbow, scrubbed, halved lengthwise, cut crosswise into thirds",
|
||||
"4 lb. baby potatoes, halved",
|
||||
"1 head of radicchio, coarsely chopped, divided",
|
||||
"1 lb. lion\u2019s mane, king trumpet, or oyster mushrooms, cut into 2\" pieces",
|
||||
"\u00bd cup Pumpkin Hot Sauce",
|
||||
"\u00bd cup grapeseed or vegetable oil",
|
||||
"Freshly ground black pepper",
|
||||
"1 cup Spiced Pecans",
|
||||
"1 lemon, halved"
|
||||
],
|
||||
"recipeInstructions": [
|
||||
{
|
||||
"@type": "HowToStep",
|
||||
"text": "Preheat oven to 375 degrees F.",
|
||||
"name": "Preheat oven to 375 degrees F.",
|
||||
"url": "https://www.browneyedbaker.com/jalapeno-popper-dip/#wprm-recipe-44993-step-0-0"
|
||||
"text": "Pur\u00e9e Sesame Cr\u00e8me, Allium Confit, chopped herbs, and vinegar in a blender on high speed, adding ice water by the tablespoonful as needed to achieve a pourable consistency, until smooth and creamy. Season sauce with salt and pepper."
|
||||
},
|
||||
{
|
||||
"@type": "HowToStep",
|
||||
"text": "Combine the cream cheese, mayonnaise, bacon, jalapenos, garlic, cumin and cheddar cheese in a mixing bowl. Transfer the mixture into 2-quart baking dish.",
|
||||
"name": "Combine the cream cheese, mayonnaise, bacon, jalapenos, garlic, cumin and cheddar cheese in a mixing bowl. Transfer the mixture into 2-quart baking dish.",
|
||||
"url": "https://www.browneyedbaker.com/jalapeno-popper-dip/#wprm-recipe-44993-step-0-1"
|
||||
"text": "Fill a large pot three quarters full with water, add reserved herb stems, and season heavily with salt. Bring water to a boil, then add carrots and cook until just tender, about 3 minutes. Using a slotted spoon, immediately transfer carrots to a large bowl of ice water and let cool."
|
||||
},
|
||||
{
|
||||
"@type": "HowToStep",
|
||||
"text": "Combine the panko breadcrumbs, Parmesan cheese and melted butter in a small bowl, tossing with a fork until the mixture is evenly moistened. Sprinkle evenly over the cream cheese mixture.",
|
||||
"name": "Combine the panko breadcrumbs, Parmesan cheese and melted butter in a small bowl, tossing with a fork until the mixture is evenly moistened. Sprinkle evenly over the cream cheese mixture.",
|
||||
"url": "https://www.browneyedbaker.com/jalapeno-popper-dip/#wprm-recipe-44993-step-0-2"
|
||||
"text": "Place potatoes in same pot and return to a boil. Cook until tender (flesh should be easy to pierce with a fork), about 10 minutes. Using slotted spoon, transfer potatoes to bowl of ice water and let cool. Drain carrots and potatoes; place in a clean large bowl and add half of the radicchio. Place mushrooms in a medium bowl."
|
||||
},
|
||||
{
|
||||
"@type": "HowToStep",
|
||||
"text": "Bake in the preheated oven for 25 to 30 minutes, until the top is golden brown and the dip is bubbling. Let rest for 5 minutes before serving. Serve with your favorite tortilla chips, crackers, vegetables, etc.",
|
||||
"name": "Bake in the preheated oven for 25 to 30 minutes, until the top is golden brown and the dip is bubbling. Let rest for 5 minutes before serving. Serve with your favorite tortilla chips, crackers, vegetables, etc.",
|
||||
"url": "https://www.browneyedbaker.com/jalapeno-popper-dip/#wprm-recipe-44993-step-0-3"
|
||||
"text": "Whisk Pumpkin Hot Sauce, oil, and chopped herbs in another medium bowl. Pour half of mixture over carrots and potatoes and the other half over mushrooms; toss each to coat. Season with salt and pepper."
|
||||
},
|
||||
{
|
||||
"text": "Prepare a grill for medium-high heat. (Alternatively, heat a large cast-iron skillet over medium-high.) Grill mushrooms, turning occasionally, until deep golden brown and crisp around the edges (or cook in batches, stirring often, if using a skillet), 12\u201314 minutes. Transfer mushrooms to a large shallow serving bowl."
|
||||
},
|
||||
{
|
||||
"text": "Grill carrots, potatoes, and radicchio, turning occasionally, until deep golden brown all over (or cook in batches, tossing often, if using a skillet), about 4 minutes. Transfer vegetables to bowl with mushrooms and toss to combine."
|
||||
},
|
||||
{
|
||||
"text": "To serve, drizzle generously with sesame sauce; top with Spiced Pecans and remaining radicchio. Squeeze juice from each lemon half over."
|
||||
}
|
||||
],
|
||||
"aggregateRating": {
|
||||
"@type": "AggregateRating",
|
||||
"ratingValue": "4.34",
|
||||
"ratingCount": "15"
|
||||
},
|
||||
"recipeCategory": [
|
||||
"Appetizer"
|
||||
],
|
||||
"recipeCuisine": [
|
||||
"American"
|
||||
],
|
||||
"keywords": "cheese dip, game day food, party food",
|
||||
"nutrition": {
|
||||
"@type": "NutritionInformation",
|
||||
"calories": "560 kcal",
|
||||
"carbohydrateContent": "7 g",
|
||||
"proteinContent": "14 g",
|
||||
"fatContent": "52 g",
|
||||
"saturatedFatContent": "21 g",
|
||||
"cholesterolContent": "109 mg",
|
||||
"sodiumContent": "707 mg",
|
||||
"sugarContent": "2 g",
|
||||
"servingSize": "1 serving"
|
||||
},
|
||||
"@id": "https://www.browneyedbaker.com/jalapeno-popper-dip/#recipe",
|
||||
"isPartOf": {
|
||||
"@id": "https://www.browneyedbaker.com/jalapeno-popper-dip/#article"
|
||||
},
|
||||
"mainEntityOfPage": "https://www.browneyedbaker.com/jalapeno-popper-dip/#webpage",
|
||||
"url": "https://www.browneyedbaker.com/jalapeno-popper-dip/",
|
||||
"slug": "jalapeno-popper-dip",
|
||||
"orgURL": "http://www.browneyedbaker.com/2011/08/03/jalapeno-popper-dip/",
|
||||
"recipeYield": "4 Servings",
|
||||
"url": "https://www.bonappetit.com/recipe/grilled-mushrooms-and-root-vegetables",
|
||||
"slug": "grilled-mushrooms-and-root-vegetables",
|
||||
"orgURL": "https://www.bonappetit.com/recipe/grilled-mushrooms-and-root-vegetables",
|
||||
"categories": [],
|
||||
"tags": [],
|
||||
"dateAdded": null,
|
||||
|
||||
|
Before Width: | Height: | Size: 519 KiB |
|
Before Width: | Height: | Size: 371 KiB |
|
Before Width: | Height: | Size: 259 KiB |
|
Before Width: | Height: | Size: 794 KiB |
|
Before Width: | Height: | Size: 572 KiB |
|
Before Width: | Height: | Size: 1.5 MiB |
|
Before Width: | Height: | Size: 528 KiB |
|
Before Width: | Height: | Size: 452 KiB |
|
Before Width: | Height: | Size: 212 KiB |
|
Before Width: | Height: | Size: 393 KiB |
|
Before Width: | Height: | Size: 556 KiB |
|
Before Width: | Height: | Size: 317 KiB |
|
Before Width: | Height: | Size: 1.3 MiB |
|
Before Width: | Height: | Size: 309 KiB |
|
Before Width: | Height: | Size: 650 KiB |
|
Before Width: | Height: | Size: 889 KiB |
|
Before Width: | Height: | Size: 294 KiB |
|
Before Width: | Height: | Size: 602 KiB |
|
Before Width: | Height: | Size: 184 KiB |
|
Before Width: | Height: | Size: 788 KiB |
|
Before Width: | Height: | Size: 664 KiB |
|
Before Width: | Height: | Size: 985 KiB |
|
Before Width: | Height: | Size: 154 KiB |
|
Before Width: | Height: | Size: 413 KiB |
|
Before Width: | Height: | Size: 184 KiB |
15
mealie/db/database.py
Normal file
@@ -0,0 +1,15 @@
|
||||
from db.db_mealplan import _Meals
|
||||
from db.db_recipes import _Recipes
|
||||
from db.db_settings import _Settings
|
||||
from db.db_themes import _Themes
|
||||
|
||||
|
||||
class Database:
    # Facade over the per-collection accessor classes so route modules can
    # use `db.<collection>` instead of importing each accessor directly.
    def __init__(self) -> None:
        self.recipes = _Recipes()  # recipe storage (slug-keyed)
        self.meals = _Meals()  # meal-plan storage (uid-keyed)
        self.settings = _Settings()  # site settings (name-keyed)
        self.themes = _Themes()  # color themes (name-keyed)


# Module-level singleton shared by the rest of the application.
db = Database()
|
||||
191
mealie/db/db_base.py
Normal file
@@ -0,0 +1,191 @@
|
||||
import json
|
||||
from typing import Union
|
||||
|
||||
import mongoengine
|
||||
from app_config import USE_MONGO, USE_SQL
|
||||
from sqlalchemy.orm.session import Session
|
||||
|
||||
from db.sql.db_session import create_session
|
||||
from db.sql.model_base import SqlAlchemyBase
|
||||
|
||||
|
||||
class BaseDocument:
    """Shared CRUD layer for all collection accessors.

    Routes each operation to either the mongoengine document store or the
    SQLAlchemy backend based on the USE_MONGO / USE_SQL flags resolved at
    startup. Subclasses set `primary_key`, `document`, and `sql_model`.
    """

    def __init__(self) -> None:
        # Declared (not assigned) here; concrete subclasses provide values.
        self.primary_key: str
        self.store: str
        self.document: mongoengine.Document
        self.sql_model: SqlAlchemyBase
        self.create_session = create_session

    @staticmethod  # TODO: Probably Put a version in each class to speed up reads?
    def _unpack_mongo(document) -> dict:
        """Convert a mongoengine document to a plain dict.

        to_json() emits BSON extended-JSON wrappers ({"$date": ...},
        {"$uuid": ...}); these are flattened to their raw values where the
        fields exist. Missing fields are simply left untouched.
        """
        document = json.loads(document.to_json())
        del document["_id"]

        # Recipe cleanup — fields may be absent or already flat, hence the
        # narrow except clauses (bug fix: previously bare `except:` which
        # would also swallow unrelated errors).
        try:
            document["dateAdded"] = document["dateAdded"]["$date"]
        except (KeyError, TypeError):
            pass

        try:
            document["uid"] = document["uid"]["$uuid"]
        except (KeyError, TypeError):
            pass

        # Meal-plan cleanup: flatten the plan dates and each meal's date.
        try:
            document["startDate"] = document["startDate"]["$date"]
            document["endDate"] = document["endDate"]["$date"]

            meals = []
            for meal in document["meals"]:
                meal["date"] = meal["date"]["$date"]
                meals.append(meal)
            document["meals"] = meals
        except (KeyError, TypeError):
            pass

        return document

    def get_all(self, limit: int = None, order_by: str = None):
        """Return all entries as a list of dicts.

        Args: \n
            limit (int, optional): Maximum number of entries to return. \n
            order_by (str, optional): Mongo-only sort key. \n

        Returns:
            list[dict], or a single dict when limit == 1 (raises IndexError
            if the store is empty in that case — preserved behavior).
        """
        if USE_MONGO:
            if order_by:
                documents = self.document.objects.order_by(str(order_by)).limit(limit)
            elif limit is None:
                documents = self.document.objects()
            else:
                documents = self.document.objects().limit(limit)

            docs = [BaseDocument._unpack_mongo(item) for item in documents]

            if limit == 1:
                return docs[0]
            return docs

        elif USE_SQL:
            session = self.create_session()
            # Renamed from `list` — previously shadowed the builtin.
            results = [x.dict() for x in session.query(self.sql_model).all()]
            session.close()

            if limit == 1:
                return results[0]

            return results

        else:
            # bug fix: previously fell through and returned None silently,
            # now consistent with get()/update().
            raise Exception("No database type established")

    def _query_one(
        self, match_value: str, match_key: str = None
    ) -> Union[Session, SqlAlchemyBase]:
        """Query the sql database for one item an return the sql alchemy model
        object. If no match key is provided the primary_key attribute will be used.

        Args:
            match_value (str): The value to use in the query
            match_key (str, optional): the key/property to match against. Defaults to None.

        Returns:
            Union[Session, SqlAlchemyBase]: Will return both the session and found model
        """
        session = self.create_session()

        if match_key is None:
            match_key = self.primary_key

        result = (
            session.query(self.sql_model).filter_by(**{match_key: match_value}).one()
        )

        return session, result

    def get(
        self, match_value: str, match_key: str = None, limit=1
    ) -> Union[dict, list]:
        """Retrieves an entry from the database by matching a key/value pair. If no
        key is provided the class objects primary key will be used to match against.


        Args: \n
            match_value (str): A value used to match against the key/value in the database \n
            match_key (str, optional): They key to match the value against. Defaults to None. \n
            limit (int, optional): A limit to returned responses. Defaults to 1. \n

        Returns:
            Union[dict, list]:
        """
        if match_key is None:
            match_key = self.primary_key

        if USE_MONGO:
            document = self.document.objects.get(**{str(match_key): match_value})
            db_entry = BaseDocument._unpack_mongo(document)

        elif USE_SQL:
            session = self.create_session()
            result = (
                session.query(self.sql_model)
                .filter_by(**{match_key: match_value})
                .one()
            )
            db_entry = result.dict()
            session.close()

            return db_entry

        else:
            raise Exception("No database type established")

        if limit == 1 and isinstance(db_entry, list):
            return db_entry[0]
        else:
            return db_entry

    def save_new(self, document: dict) -> dict:
        """Insert a new entry and return it as a dict."""
        if USE_MONGO:
            new_document = self.document(**document)
            new_document.save()
            return BaseDocument._unpack_mongo(new_document)
        elif USE_SQL:
            session = self.create_session()
            new_document = self.sql_model(**document)
            session.add(new_document)
            # Capture the dict before commit so expired attributes aren't an issue.
            return_data = new_document.dict()
            session.commit()
            session.close()  # bug fix: session was previously leaked

            return return_data
        else:
            raise Exception("No database type established")

    def update(self, match_value, new_data) -> dict:
        """Update the entry matched by the primary key and return the new data."""
        if USE_MONGO:
            return_data = self.update_mongo(match_value, new_data)
        elif USE_SQL:
            session, entry = self._query_one(match_value=match_value)
            entry.update(session=session, **new_data)
            return_data = entry.dict()
            session.commit()

            session.close()
        else:
            raise Exception("No Database Configured")

        return return_data

    def delete(self, primary_key_value) -> dict:
        """Delete the entry whose primary key equals `primary_key_value`.

        NOTE(review): annotated `-> dict` but returns None on both paths —
        kept as-is for interface compatibility.
        """
        if USE_MONGO:
            document = self.document.objects.get(
                **{str(self.primary_key): primary_key_value}
            )

            if document:
                document.delete()
        elif USE_SQL:
            # Consistency: use self.create_session like every other method
            # (previously called the module-level create_session directly).
            session = self.create_session()

            result = (
                session.query(self.sql_model)
                .filter_by(**{self.primary_key: primary_key_value})
                .one()
            )

            session.delete(result)

            session.commit()
            session.close()
|
||||
63
mealie/db/db_mealplan.py
Normal file
@@ -0,0 +1,63 @@
|
||||
from typing import List
|
||||
|
||||
from app_config import USE_MONGO, USE_SQL
|
||||
|
||||
from db.db_base import BaseDocument
|
||||
from db.db_setup import USE_MONGO, USE_SQL
|
||||
from db.mongo.meal_models import MealDocument, MealPlanDocument
|
||||
from db.sql.db_session import create_session
|
||||
from db.sql.meal_models import MealPlanModel
|
||||
|
||||
|
||||
class _Meals(BaseDocument):
    """Database accessor for meal plans (uid-keyed)."""

    def __init__(self) -> None:
        self.primary_key = "uid"
        if USE_SQL:
            self.sql_model = MealPlanModel
            self.create_session = create_session

        self.document = MealPlanDocument

    @staticmethod
    def _process_meals(meals: List[dict]) -> List[MealDocument]:
        """Turns a list of Meals in dictionary form into a list of
        MealDocuments that can be attached to a MealPlanDocument


        Args: \n
            meals (List[dict]): From a Pydantic Class in meal_services.py \n

        Returns:
            a List of MealDocuments
        """
        return [MealDocument(**meal) for meal in meals]

    def save_new_mongo(self, plan_data: dict) -> None:
        """Saves a new meal plan into the database

        Args: \n
            plan_data (dict): From a Pydantic Class in meal_services.py \n
        """

        if USE_MONGO:
            plan_data["meals"] = _Meals._process_meals(plan_data["meals"])
            self.document(**plan_data).save()
        elif USE_SQL:
            pass

    def update_mongo(self, uid: str, plan_data: dict) -> dict:
        """Replace the meals on the plan matched by uid (Mongo only)."""
        if USE_MONGO:
            plan = self.document.objects.get(uid=uid)
            if plan:
                plan.update(set__meals=_Meals._process_meals(plan_data["meals"]))
                plan.save()
        elif USE_SQL:
            pass
|
||||
68
mealie/db/db_recipes.py
Normal file
@@ -0,0 +1,68 @@
|
||||
from app_config import USE_MONGO, USE_SQL
|
||||
|
||||
from db.db_base import BaseDocument
|
||||
from db.mongo.recipe_models import RecipeDocument
|
||||
from db.sql.db_session import create_session
|
||||
from db.sql.recipe_models import RecipeModel
|
||||
|
||||
|
||||
class _Recipes(BaseDocument):
    """Database accessor for recipes (slug-keyed)."""

    def __init__(self) -> None:
        self.primary_key = "slug"
        if USE_SQL:
            self.sql_model = RecipeModel
            self.create_session = create_session
        else:
            self.document = RecipeDocument

    def save_new_sql(self, recipe_data: dict):
        """Insert a new recipe row and echo the input payload back."""
        session = self.create_session()
        session.add(self.sql_model(**recipe_data))
        session.commit()

        return recipe_data

    def update_mongo(self, slug: str, new_data: dict) -> None:
        """Overwrite the stored recipe matched by slug with `new_data`
        (Mongo only) and return the new data."""
        if USE_MONGO:
            recipe = self.document.objects.get(slug=slug)

            if recipe:
                # One atomic set__ update per field, same order as before.
                for field in (
                    "name",
                    "description",
                    "image",
                    "recipeYield",
                    "recipeIngredient",
                    "recipeInstructions",
                    "totalTime",
                    "slug",
                    "categories",
                    "tags",
                    "notes",
                    "orgURL",
                    "rating",
                    "extras",
                ):
                    recipe.update(**{f"set__{field}": new_data.get(field)})
                recipe.save()

            return new_data

    def update_image(self, slug: str, extension: str) -> None:
        """Point the stored image at '<slug>.<extension>' (Mongo only)."""
        if USE_MONGO:
            recipe = self.document.objects.get(slug=slug)
            if recipe:
                recipe.update(set__image=f"{slug}.{extension}")
        elif USE_SQL:
            pass
|
||||
44
mealie/db/db_settings.py
Normal file
@@ -0,0 +1,44 @@
|
||||
from app_config import USE_MONGO, USE_SQL
|
||||
|
||||
from db.db_base import BaseDocument
|
||||
from db.db_setup import USE_MONGO, USE_SQL
|
||||
from db.mongo.settings_models import SiteSettingsDocument, WebhooksDocument
|
||||
from db.sql.db_session import create_session
|
||||
from db.sql.settings_models import SiteSettingsModel
|
||||
|
||||
|
||||
class _Settings(BaseDocument):
    """Database accessor for the site settings entry (name-keyed)."""

    def __init__(self) -> None:

        self.primary_key = "name"

        if USE_SQL:
            self.sql_model = SiteSettingsModel
            self.create_session = create_session

        self.document = SiteSettingsDocument

    def save_new(self, main: dict, webhooks: dict) -> str:
        """Create the settings entry.

        Args: \n
            main (dict): top-level settings values; must include "name". \n
            webhooks (dict): webhook configuration stored with the settings. \n
        """

        if USE_MONGO:
            main["webhooks"] = WebhooksDocument(**webhooks)
            new_doc = self.document(**main)
            return new_doc.save()

        elif USE_SQL:
            session = create_session()
            new_settings = self.sql_model(main.get("name"), webhooks)

            session.add(new_settings)
            session.commit()

            # Capture the dict before closing, then close the session
            # (bug fix: the session was previously never closed).
            return_data = new_settings.dict()
            session.close()
            return return_data

    def update_mongo(self, name: str, new_data: dict) -> dict:
        """Replace the stored webhook config on the named settings entry."""
        if USE_MONGO:
            document = self.document.objects.get(name=name)
            if document:
                document.update(set__webhooks=WebhooksDocument(**new_data["webhooks"]))
                document.save()
        elif USE_SQL:
            return
|
||||
16
mealie/db/db_setup.py
Normal file
@@ -0,0 +1,16 @@
|
||||
from app_config import SQLITE_FILE, USE_MONGO, USE_SQL

from db.sql.db_session import globa_init as sql_global_init

# True when the SQLite file already existed before startup; consumers can
# check this to decide whether initial data needs to be seeded.
sql_exists = True

if USE_SQL:
    sql_exists = SQLITE_FILE.is_file()
    sql_global_init(SQLITE_FILE)
    # (bug fix: removed a stray dead `pass` statement here)

elif USE_MONGO:
    # Imported lazily so mongo dependencies are only required when mongo
    # is the selected backend.
    from db.mongo.mongo_setup import global_init as mongo_global_init

    mongo_global_init()
|
||||
56
mealie/db/db_themes.py
Normal file
@@ -0,0 +1,56 @@
|
||||
from app_config import USE_MONGO, USE_SQL
|
||||
|
||||
from db.db_base import BaseDocument
|
||||
from db.db_setup import USE_MONGO, USE_SQL
|
||||
from db.mongo.settings_models import SiteThemeDocument, ThemeColorsDocument
|
||||
from db.sql.db_session import create_session
|
||||
from db.sql.theme_models import SiteThemeModel
|
||||
|
||||
|
||||
class _Themes(BaseDocument):
    """Database accessor for site color themes (name-keyed)."""

    def __init__(self) -> None:
        self.primary_key = "name"
        if USE_SQL:
            self.sql_model = SiteThemeModel
            self.create_session = create_session
        else:
            self.document = SiteThemeDocument

    def save_new(self, theme_data: dict) -> None:
        """Persist a new theme; on the SQL backend, return the stored row
        as a dict."""
        if USE_MONGO:
            theme_data["colors"] = ThemeColorsDocument(**theme_data["colors"])

            self.document(**theme_data).save()
        elif USE_SQL:
            db_session = self.create_session()
            theme_row = self.sql_model(**theme_data)

            db_session.add(theme_row)
            db_session.commit()

            saved = theme_row.dict()

            db_session.close()
            return saved

    def update(self, data: dict) -> dict:
        """Overwrite an existing theme's colors; raises when no Mongo entry
        matches the given name."""
        if USE_MONGO:
            palette = ThemeColorsDocument(**data["colors"])
            existing = self.document.objects.get(name=data.get("name"))

            if existing:
                existing.update(set__colors=palette)
                existing.save()
            else:
                raise Exception("No database entry was found to update")

        elif USE_SQL:
            db_session, theme_row = self._query_one(
                match_value=data["name"], match_key="name"
            )

            theme_row.update(**data)
            db_session.commit()
            db_session.close()
||||
@@ -1,5 +1,6 @@
|
||||
import mongoengine
|
||||
from settings import DB_HOST, DB_PASSWORD, DB_PORT, DB_USERNAME, MEALIE_DB_NAME
|
||||
from app_config import DB_HOST, DB_PASSWORD, DB_PORT, DB_USERNAME, MEALIE_DB_NAME
|
||||
from utils.logger import logger
|
||||
|
||||
|
||||
def global_init():
|
||||
@@ -12,3 +13,5 @@ def global_init():
|
||||
password=DB_PASSWORD,
|
||||
authentication_source="admin",
|
||||
)
|
||||
|
||||
logger.info("Mongo Data Initialized")
|
||||
@@ -1,5 +1,4 @@
|
||||
import datetime
|
||||
import uuid
|
||||
|
||||
import mongoengine
|
||||
|
||||
@@ -19,7 +18,7 @@ class RecipeDocument(mongoengine.Document):
|
||||
slug = mongoengine.StringField(required=True, unique=True)
|
||||
categories = mongoengine.ListField(default=[])
|
||||
tags = mongoengine.ListField(default=[])
|
||||
dateAdded = mongoengine.DateTimeField(binary=True, default=datetime.date.today())
|
||||
dateAdded = mongoengine.DateTimeField(binary=True, default=datetime.date.today)
|
||||
notes = mongoengine.ListField(default=[])
|
||||
rating = mongoengine.IntField(required=True, default=0)
|
||||
orgURL = mongoengine.URLField(required=False)
|
||||
6
mealie/db/mongo/user_models.py
Normal file
@@ -0,0 +1,6 @@
|
||||
|
||||
# import mongoengine
|
||||
|
||||
# class User(mongoengine.Document):
|
||||
# username: mongoengine.EmailField()
|
||||
# password: mongoengine.ReferenceField
|
||||
4
mealie/db/sql/_all_models.py
Normal file
@@ -0,0 +1,4 @@
|
||||
from db.sql.meal_models import *
|
||||
from db.sql.recipe_models import *
|
||||
from db.sql.settings_models import *
|
||||
from db.sql.theme_models import *
|
||||
29
mealie/db/sql/db_session.py
Normal file
@@ -0,0 +1,29 @@
|
||||
from pathlib import Path
|
||||
|
||||
import sqlalchemy as sa
|
||||
import sqlalchemy.orm as orm
|
||||
from db.sql.model_base import SqlAlchemyBase
|
||||
from sqlalchemy.orm.session import Session
|
||||
|
||||
__factory = None
|
||||
|
||||
|
||||
def globa_init(db_file: Path):
    """Initialize the SQLite engine and session factory (idempotent).

    NOTE: the name "globa_init" (sic) is imported elsewhere and is therefore
    kept unchanged.
    """
    global __factory

    if __factory:
        # Already initialized — nothing to do.
        return

    connection_string = "sqlite:///" + str(db_file.absolute())
    engine = sa.create_engine(connection_string, echo=False)
    __factory = orm.sessionmaker(bind=engine)

    # Import every model module so all tables register themselves on the
    # shared declarative base before the schema is created.
    import db.sql._all_models  # noqa: F401

    SqlAlchemyBase.metadata.create_all(engine)
|
||||
|
||||
|
||||
def create_session() -> Session:
    """Return a fresh ORM session; ``globa_init`` must have run first."""
    global __factory
    session_factory = __factory
    return session_factory()
|
||||
66
mealie/db/sql/meal_models.py
Normal file
@@ -0,0 +1,66 @@
|
||||
import uuid
|
||||
from typing import List
|
||||
|
||||
import sqlalchemy as sa
|
||||
import sqlalchemy.orm as orm
|
||||
from db.sql.model_base import BaseMixins, SqlAlchemyBase
|
||||
|
||||
|
||||
class Meal(SqlAlchemyBase):
    """One meal slot belonging to a meal plan."""

    __tablename__ = "meal"
    id = sa.Column(sa.Integer, primary_key=True)
    parent_id = sa.Column(sa.String, sa.ForeignKey("mealplan.uid"))
    slug = sa.Column(sa.String)
    name = sa.Column(sa.String)
    date = sa.Column(sa.Date)
    dateText = sa.Column(sa.String)
    image = sa.Column(sa.String)
    description = sa.Column(sa.String)

    # Field names serialized by dict() and assigned by __init__.
    _FIELDS = ("slug", "name", "date", "dateText", "image", "description")

    def __init__(self, slug, name, date, dateText, image, description) -> None:
        values = (slug, name, date, dateText, image, description)
        for attr, value in zip(self._FIELDS, values):
            setattr(self, attr, value)

    def dict(self) -> dict:
        """Serialize the meal to a plain dict."""
        return {field: getattr(self, field) for field in self._FIELDS}
|
||||
|
||||
|
||||
class MealPlanModel(SqlAlchemyBase, BaseMixins):
    """A meal-plan row owning a collection of child Meal rows."""

    __tablename__ = "mealplan"
    uid = sa.Column(sa.Integer, primary_key=True, unique=True)  #! Probably Bad?
    startDate = sa.Column(sa.Date)
    endDate = sa.Column(sa.Date)
    meals: List[Meal] = orm.relation(Meal)

    def __init__(self, startDate, endDate, meals, uid=None) -> None:
        # NOTE(review): the uid argument is accepted but never assigned —
        # the primary key is left to the database. Confirm this is intended.
        self.startDate = startDate
        self.endDate = endDate
        # Each meal dict becomes its own child Meal row.
        self.meals = [Meal(**meal) for meal in meals]

    def update(self, session, startDate, endDate, meals, uid) -> None:
        """Rebuild the plan in place: delete child Meal rows, then re-init."""
        # Remove the existing Meal rows pointing at this plan before
        # __init__ recreates the collection, so no orphans remain.
        MealPlanModel._sql_remove_list(session, [Meal], uid)

        self.__init__(startDate, endDate, meals)

    def dict(self) -> dict:
        """Serialize the plan, including its meals, to a plain dict."""
        data = {
            "uid": self.uid,
            "startDate": self.startDate,
            "endDate": self.endDate,
            "meals": [meal.dict() for meal in self.meals],
        }

        return data
|
||||
22
mealie/db/sql/model_base.py
Normal file
@@ -0,0 +1,22 @@
|
||||
from typing import List
|
||||
|
||||
import sqlalchemy.ext.declarative as dec
|
||||
|
||||
SqlAlchemyBase = dec.declarative_base()
|
||||
|
||||
|
||||
class BaseMixins:
    """Shared helpers mixed into the SQL model classes."""

    @staticmethod
    def _sql_remove_list(session, list_of_tables: list, parent_id):
        """Delete every row in each given table whose parent_id matches.

        Used before rebuilding a parent's child collections so stale child
        rows do not accumulate.
        """
        for table in list_of_tables:
            session.query(table).filter_by(parent_id=parent_id).delete()

    @staticmethod
    def _flatten_dict(list_of_dict: List) -> dict:
        """Merge each item's ``.dict()`` output into one dict; later keys win.

        Fix: the annotation previously claimed ``List[dict]``, but plain
        dicts have no ``.dict()`` method — items must be model objects that
        expose one.
        """
        final_map = {}
        for item in list_of_dict:
            final_map.update(item.dict())

        return final_map
|
||||
244
mealie/db/sql/recipe_models.py
Normal file
@@ -0,0 +1,244 @@
|
||||
import datetime
|
||||
from datetime import date
|
||||
from typing import List
|
||||
|
||||
import sqlalchemy as sa
|
||||
import sqlalchemy.orm as orm
|
||||
from db.sql.model_base import BaseMixins, SqlAlchemyBase
|
||||
from sqlalchemy.ext.orderinglist import ordering_list
|
||||
|
||||
|
||||
class ApiExtras(SqlAlchemyBase):
    """Arbitrary key/value extra data attached to a recipe, one row per key."""

    __tablename__ = "api_extras"
    id = sa.Column(sa.Integer, primary_key=True)
    parent_id = sa.Column(sa.String, sa.ForeignKey("recipes.id"))
    # NOTE(review): unique=True makes the key unique across ALL recipes,
    # not per recipe — presumably unintended; verify.
    key_name = sa.Column(sa.String, unique=True)
    value = sa.Column(sa.String)

    def __init__(self, key, value) -> None:
        self.key_name = key
        self.value = value

    def dict(self):
        """Return the row as a single-entry {key: value} mapping."""
        return {self.key_name: self.value}
|
||||
|
||||
|
||||
class Category(SqlAlchemyBase):
    """A single category name attached to a recipe."""

    __tablename__ = "categories"
    id = sa.Column(sa.Integer, primary_key=True)
    parent_id = sa.Column(sa.String, sa.ForeignKey("recipes.id"))
    name = sa.Column(sa.String, index=True)

    def to_str(self):
        """Return the bare category name for serialization."""
        return self.name
|
||||
|
||||
|
||||
class Tag(SqlAlchemyBase):
    """A single tag name attached to a recipe."""

    __tablename__ = "tags"
    id = sa.Column(sa.Integer, primary_key=True)
    parent_id = sa.Column(sa.String, sa.ForeignKey("recipes.id"))
    name = sa.Column(sa.String, index=True)

    def to_str(self):
        """Return the bare tag name for serialization."""
        return self.name
|
||||
|
||||
|
||||
class Note(SqlAlchemyBase):
    """A free-form titled note attached to a recipe."""

    __tablename__ = "notes"
    id = sa.Column(sa.Integer, primary_key=True)
    parent_id = sa.Column(sa.String, sa.ForeignKey("recipes.id"))
    title = sa.Column(sa.String)
    text = sa.Column(sa.String)

    def dict(self):
        """Return the note as {"title": ..., "text": ...}."""
        return {"title": self.title, "text": self.text}
|
||||
|
||||
|
||||
class RecipeIngredient(SqlAlchemyBase):
    """One ingredient line of a recipe.

    ``position`` is maintained by the parent RecipeModel's ordering_list
    collection, not set directly here.
    """

    __tablename__ = "recipes_ingredients"
    id = sa.Column(sa.Integer, primary_key=True)
    position = sa.Column(sa.Integer)
    parent_id = sa.Column(sa.String, sa.ForeignKey("recipes.id"))
    ingredient = sa.Column(sa.String)

    def update(self, ingredient):
        # Replace the ingredient text in place.
        self.ingredient = ingredient

    def to_str(self):
        """Return the raw ingredient string."""
        return self.ingredient
|
||||
|
||||
|
||||
class RecipeInstruction(SqlAlchemyBase):
    """One instruction step of a recipe.

    ``position`` is maintained by the parent RecipeModel's ordering_list
    collection, not set directly here.
    """

    __tablename__ = "recipe_instructions"
    id = sa.Column(sa.Integer, primary_key=True)
    parent_id = sa.Column(sa.String, sa.ForeignKey("recipes.id"))
    position = sa.Column(sa.Integer)
    # Serialized under the "@type" key below — looks like schema.org
    # HowToStep data; TODO confirm against the scraper.
    type = sa.Column(sa.String, default="")
    text = sa.Column(sa.String)

    def dict(self):
        """Serialize as {"@type": ..., "text": ...}."""
        data = {"@type": self.type, "text": self.text}

        return data
|
||||
|
||||
|
||||
class RecipeModel(SqlAlchemyBase, BaseMixins):
    """SQL model for a recipe, owning all of its nested one-to-many rows
    (ingredients, instructions, categories, tags, notes, extras)."""

    __tablename__ = "recipes"
    # Database Specific
    id = sa.Column(sa.Integer, primary_key=True)

    # General Recipe Properties
    name = sa.Column(sa.String)
    description = sa.Column(sa.String)
    image = sa.Column(sa.String)
    recipeYield = sa.Column(sa.String)
    recipeIngredient: List[RecipeIngredient] = orm.relationship(
        "RecipeIngredient",
        cascade="all, delete",
        order_by="RecipeIngredient.position",
        collection_class=ordering_list("position"),
    )
    recipeInstructions: List[RecipeInstruction] = orm.relationship(
        "RecipeInstruction",
        cascade="all, delete",
        order_by="RecipeInstruction.position",
        collection_class=ordering_list("position"),
    )

    # How to Properties
    totalTime = sa.Column(sa.String)
    prepTime = sa.Column(sa.String)
    performTime = sa.Column(sa.String)

    # Mealie Specific
    slug = sa.Column(sa.String, index=True, unique=True)
    categories: List[Category] = orm.relationship(
        "Category",
        cascade="all, delete",
    )
    tags: List[Tag] = orm.relationship(
        "Tag",
        cascade="all, delete",
    )
    dateAdded = sa.Column(sa.Date, default=date.today)
    notes: List[Note] = orm.relationship(
        "Note",
        cascade="all, delete",
    )
    rating = sa.Column(sa.Integer)
    orgURL = sa.Column(sa.String)
    extras: List[ApiExtras] = orm.relationship("ApiExtras", cascade="all, delete")

    def __init__(
        self,
        name: str = None,
        description: str = None,
        image: str = None,
        recipeYield: str = None,
        recipeIngredient: List[str] = None,
        recipeInstructions: List[dict] = None,
        totalTime: str = None,
        prepTime: str = None,
        performTime: str = None,
        slug: str = None,
        categories: List[str] = None,
        tags: List[str] = None,
        dateAdded: datetime.date = None,
        notes: List[dict] = None,
        rating: int = None,
        orgURL: str = None,
        extras: dict = None,
    ) -> None:
        """Build the recipe row and all of its child rows.

        All collection parameters are None-safe: a missing collection is
        treated as empty instead of raising TypeError.
        """
        self.name = name
        self.description = description
        self.image = image
        self.recipeYield = recipeYield
        self.recipeIngredient = [
            RecipeIngredient(ingredient=ingr) for ingr in recipeIngredient or []
        ]
        # Fix: the step type comes from the "@type" key (the same key
        # RecipeInstruction.dict() writes), not from "text".
        self.recipeInstructions = [
            RecipeInstruction(text=instruc.get("text"), type=instruc.get("@type"))
            for instruc in recipeInstructions or []
        ]
        self.totalTime = totalTime
        self.prepTime = prepTime
        self.performTime = performTime

        # Mealie Specific
        self.slug = slug
        self.categories = [Category(name=cat) for cat in categories or []]
        self.tags = [Tag(name=tag) for tag in tags or []]
        self.dateAdded = dateAdded
        # Fix: SQLAlchemy's declarative default constructor accepts only
        # keyword arguments, so the note dict must be unpacked —
        # Note(note) raised TypeError.
        self.notes = [Note(**note) for note in notes or []]
        self.rating = rating
        self.orgURL = orgURL
        self.extras = [
            ApiExtras(key=key, value=value) for key, value in (extras or {}).items()
        ]

    def update(
        self,
        session,
        name: str = None,
        description: str = None,
        image: str = None,
        recipeYield: str = None,
        recipeIngredient: List[str] = None,
        recipeInstructions: List[dict] = None,
        totalTime: str = None,
        prepTime: str = None,
        performTime: str = None,
        slug: str = None,
        categories: List[str] = None,
        tags: List[str] = None,
        dateAdded: datetime.date = None,
        notes: List[dict] = None,
        rating: int = None,
        orgURL: str = None,
        extras: dict = None,
    ):
        """Updated a database entry by removing nested rows and rebuilds the row through the __init__ functions"""
        # Purge every child row first so __init__ can recreate them cleanly.
        list_of_tables = [RecipeIngredient, RecipeInstruction, Category, Tag, ApiExtras]
        RecipeModel._sql_remove_list(session, list_of_tables, self.id)

        self.__init__(
            name=name,
            description=description,
            image=image,
            recipeYield=recipeYield,
            recipeIngredient=recipeIngredient,
            recipeInstructions=recipeInstructions,
            totalTime=totalTime,
            prepTime=prepTime,
            performTime=performTime,
            slug=slug,
            categories=categories,
            tags=tags,
            dateAdded=dateAdded,
            notes=notes,
            rating=rating,
            orgURL=orgURL,
            extras=extras,
        )

    def dict(self):
        """Serialize the recipe (and all child rows) to a plain dict."""
        data = {
            "name": self.name,
            "description": self.description,
            "image": self.image,
            "recipeYield": self.recipeYield,
            "recipeIngredient": [x.to_str() for x in self.recipeIngredient],
            "recipeInstructions": [x.dict() for x in self.recipeInstructions],
            "totalTime": self.totalTime,
            "prepTime": self.prepTime,
            "performTime": self.performTime,
            # Mealie
            "slug": self.slug,
            "categories": [x.to_str() for x in self.categories],
            "tags": [x.to_str() for x in self.tags],
            "dateAdded": self.dateAdded,
            "notes": [x.dict() for x in self.notes],
            "rating": self.rating,
            "orgURL": self.orgURL,
            "extras": RecipeModel._flatten_dict(self.extras),
        }

        return data
|
||||
67
mealie/db/sql/settings_models.py
Normal file
@@ -0,0 +1,67 @@
|
||||
import sqlalchemy as sa
|
||||
import sqlalchemy.orm as orm
|
||||
from db.sql.model_base import BaseMixins, SqlAlchemyBase
|
||||
|
||||
|
||||
class SiteSettingsModel(SqlAlchemyBase):
    """Top-level site settings row, keyed by name, owning one webhook config."""

    __tablename__ = "site_settings"
    name = sa.Column(sa.String, primary_key=True)
    webhooks = orm.relationship("WebHookModel", uselist=False, cascade="all, delete")

    def __init__(self, name: str = None, webhooks: dict = None) -> None:
        self.name = name
        # The raw webhooks dict becomes the single child row.
        self.webhooks = WebHookModel(**webhooks)

    def update(self, session, name, webhooks: dict) -> None:
        """Rename the settings row and delegate webhook changes to the child.

        Fix: was annotated ``-> dict`` while always returning None.
        """
        self.name = name
        self.webhooks.update(session=session, **webhooks)

    def dict(self):
        """Serialize to {"name": ..., "webhooks": {...}}."""
        data = {"name": self.name, "webhooks": self.webhooks.dict()}
        return data
|
||||
|
||||
|
||||
class WebHookModel(SqlAlchemyBase, BaseMixins):
    """Webhook configuration owned by a SiteSettingsModel row."""

    __tablename__ = "webhook_settings"
    id = sa.Column(sa.Integer, primary_key=True)
    parent_id = sa.Column(sa.String, sa.ForeignKey("site_settings.name"))
    webhookURLs = orm.relationship(
        "WebhookURLModel", uselist=True, cascade="all, delete"
    )
    webhookTime = sa.Column(sa.String, default="00:00")
    enabled = sa.Column(sa.Boolean, default=False)

    def __init__(
        self, webhookURLs: list, webhookTime: str, enabled: bool = False
    ) -> None:
        # Each URL string becomes its own child row.
        self.webhookURLs = [WebhookURLModel(url=x) for x in webhookURLs]
        self.webhookTime = webhookTime
        self.enabled = enabled

    def update(
        self, session, webhookURLs: list, webhookTime: str, enabled: bool
    ) -> None:
        """Replace the URL children and re-initialize the scalar fields."""
        # Drop the old URL rows first so re-running __init__ does not
        # leave orphaned children behind.
        self._sql_remove_list(session, [WebhookURLModel], self.id)

        self.__init__(webhookURLs, webhookTime, enabled)

    def dict(self):
        """Serialize the webhook config to a plain dict."""
        data = {
            "webhookURLs": [url.to_str() for url in self.webhookURLs],
            "webhookTime": self.webhookTime,
            "enabled": self.enabled,
        }
        return data
|
||||
|
||||
|
||||
class WebhookURLModel(SqlAlchemyBase):
    """A single webhook URL string belonging to a WebHookModel."""

    __tablename__ = "webhook_urls"
    id = sa.Column(sa.Integer, primary_key=True)
    url = sa.Column(sa.String)
    parent_id = sa.Column(sa.Integer, sa.ForeignKey("webhook_settings.id"))

    def to_str(self):
        """Return the raw URL string."""
        return self.url
|
||||
64
mealie/db/sql/theme_models.py
Normal file
@@ -0,0 +1,64 @@
|
||||
import sqlalchemy as sa
|
||||
import sqlalchemy.orm as orm
|
||||
from db.sql.model_base import BaseMixins, SqlAlchemyBase
|
||||
|
||||
|
||||
class SiteThemeModel(SqlAlchemyBase):
    """A named UI theme owning a single ThemeColorsModel row."""

    __tablename__ = "site_theme"
    name = sa.Column(sa.String, primary_key=True)
    colors = orm.relationship("ThemeColorsModel", uselist=False, cascade="all, delete")

    def __init__(self, name: str, colors: dict) -> None:
        self.name = name
        self.colors = ThemeColorsModel(**colors)

    def update(self, name, colors: dict) -> dict:
        """Overwrite the theme's colors and return the updated dict form.

        NOTE(review): the ``name`` argument is accepted but never applied,
        so themes cannot be renamed through this path — confirm intended.
        """
        self.colors.update(**colors)
        return self.dict()

    def dict(self):
        """Serialize to {"name": ..., "colors": {...}}."""
        data = {"name": self.name, "colors": self.colors.dict()}
        return data
|
||||
|
||||
|
||||
class ThemeColorsModel(SqlAlchemyBase):
    """The color palette belonging to a single site theme."""

    __tablename__ = "theme_colors"
    id = sa.Column(sa.Integer, primary_key=True)
    parent_id = sa.Column(sa.String, sa.ForeignKey("site_theme.name"))
    primary = sa.Column(sa.String)
    accent = sa.Column(sa.String)
    secondary = sa.Column(sa.String)
    success = sa.Column(sa.String)
    info = sa.Column(sa.String)
    warning = sa.Column(sa.String)
    error = sa.Column(sa.String)

    # The seven color slots written by update() and serialized by dict().
    _COLOR_SLOTS = (
        "primary",
        "accent",
        "secondary",
        "success",
        "info",
        "warning",
        "error",
    )

    def update(
        self,
        primary: str = None,
        accent: str = None,
        secondary: str = None,
        success: str = None,
        info: str = None,
        warning: str = None,
        error: str = None,
    ) -> None:
        """Overwrite every color slot with the supplied values."""
        new_values = (primary, accent, secondary, success, info, warning, error)
        for slot, value in zip(self._COLOR_SLOTS, new_values):
            setattr(self, slot, value)

    def dict(self):
        """Return the palette as a plain {slot: color} mapping."""
        return {slot: getattr(self, slot) for slot in self._COLOR_SLOTS}
|
||||
@@ -1,6 +0,0 @@
|
||||
|
||||
import mongoengine
|
||||
|
||||
class User(mongoengine.Document):
|
||||
username: mongoengine.EmailField()
|
||||
# password: mongoengine.ReferenceField
|
||||
@@ -1,3 +1,4 @@
|
||||
from datetime import datetime
|
||||
from typing import List, Optional
|
||||
|
||||
from pydantic import BaseModel
|
||||
@@ -5,7 +6,7 @@ from pydantic import BaseModel
|
||||
|
||||
class BackupJob(BaseModel):
|
||||
tag: Optional[str]
|
||||
template: Optional[str]
|
||||
template: Optional[List[str]]
|
||||
|
||||
class Config:
|
||||
schema_extra = {
|
||||
@@ -16,14 +17,45 @@ class BackupJob(BaseModel):
|
||||
}
|
||||
|
||||
|
||||
class LocalBackup(BaseModel):
|
||||
name: str
|
||||
date: datetime
|
||||
|
||||
|
||||
class Imports(BaseModel):
|
||||
imports: List[str]
|
||||
imports: List[LocalBackup]
|
||||
templates: List[str]
|
||||
|
||||
class Config:
|
||||
schema_extra = {
|
||||
"example": {
|
||||
"imports": ["sample_data.zip", "sampe_data2.zip"],
|
||||
"imports": [
|
||||
{
|
||||
"name": "AutoBackup_12-1-2020.zip",
|
||||
"date": datetime.now(),
|
||||
}
|
||||
],
|
||||
"templates": ["recipes.md", "custom_template.md"],
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class ImportJob(BaseModel):
|
||||
name: str
|
||||
recipes: bool
|
||||
force: bool = False
|
||||
rebase: bool = False
|
||||
themes: bool = False
|
||||
settings: bool = False
|
||||
|
||||
class Config:
|
||||
schema_extra = {
|
||||
"example": {
|
||||
"name": "my_local_backup.zip",
|
||||
"recipes": True,
|
||||
"force": False,
|
||||
"rebase": False,
|
||||
"themes": False,
|
||||
"settings": False
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,51 +1,59 @@
|
||||
import operator
|
||||
|
||||
from app_config import BACKUP_DIR, TEMPLATE_DIR
|
||||
from fastapi import APIRouter, HTTPException
|
||||
from models.backup_models import BackupJob, Imports
|
||||
from services.backup_services import (
|
||||
BACKUP_DIR,
|
||||
TEMPLATE_DIR,
|
||||
export_db,
|
||||
import_from_archive,
|
||||
)
|
||||
from models.backup_models import BackupJob, ImportJob, Imports, LocalBackup
|
||||
from services.backups.exports import backup_all
|
||||
from services.backups.imports import ImportDatabase
|
||||
from utils.snackbar import SnackResponse
|
||||
|
||||
router = APIRouter()
|
||||
router = APIRouter(tags=["Import / Export"])
|
||||
|
||||
|
||||
@router.get("/api/backups/available/", tags=["Import / Export"], response_model=Imports)
|
||||
async def available_imports():
|
||||
@router.get("/api/backups/available/", response_model=Imports)
|
||||
def available_imports():
|
||||
"""Returns a list of avaiable .zip files for import into Mealie."""
|
||||
imports = []
|
||||
templates = []
|
||||
for archive in BACKUP_DIR.glob("*.zip"):
|
||||
imports.append(archive.name)
|
||||
backup = LocalBackup(name=archive.name, date=archive.stat().st_ctime)
|
||||
imports.append(backup)
|
||||
|
||||
for template in TEMPLATE_DIR.glob("*.md"):
|
||||
templates.append(template.name)
|
||||
|
||||
imports.sort(key=operator.attrgetter("date"), reverse=True)
|
||||
|
||||
return Imports(imports=imports, templates=templates)
|
||||
|
||||
|
||||
@router.post("/api/backups/export/database/", tags=["Import / Export"], status_code=201)
|
||||
async def export_database(data: BackupJob):
|
||||
@router.post("/api/backups/export/database/", status_code=201)
|
||||
def export_database(data: BackupJob):
|
||||
"""Generates a backup of the recipe database in json format."""
|
||||
|
||||
export_path = backup_all(data.tag, data.template)
|
||||
try:
|
||||
export_path = export_db(data.tag, data.template)
|
||||
return SnackResponse.success("Backup Created at " + export_path)
|
||||
except:
|
||||
HTTPException(
|
||||
status_code=400,
|
||||
detail=SnackResponse.error("Error Creating Backup. See Log File"),
|
||||
)
|
||||
|
||||
return SnackResponse.success("Backup Created at " + export_path)
|
||||
|
||||
|
||||
@router.post(
|
||||
"/api/backups/{file_name}/import/", tags=["Import / Export"], status_code=200
|
||||
)
|
||||
async def import_database(file_name: str):
|
||||
@router.post("/api/backups/{file_name}/import/", status_code=200)
|
||||
def import_database(file_name: str, import_data: ImportJob):
|
||||
""" Import a database backup file generated from Mealie. """
|
||||
imported = import_from_archive(file_name)
|
||||
|
||||
import_db = ImportDatabase(
|
||||
zip_archive=import_data.name,
|
||||
import_recipes=import_data.recipes,
|
||||
force_import=import_data.force,
|
||||
rebase=import_data.rebase,
|
||||
import_settings=import_data.settings,
|
||||
import_themes=import_data.themes,
|
||||
)
|
||||
|
||||
imported = import_db.run()
|
||||
return imported
|
||||
|
||||
|
||||
@@ -54,7 +62,7 @@ async def import_database(file_name: str):
|
||||
tags=["Import / Export"],
|
||||
status_code=200,
|
||||
)
|
||||
async def delete_backup(backup_name: str):
|
||||
def delete_backup(backup_name: str):
|
||||
""" Removes a database backup from the file system """
|
||||
|
||||
try:
|
||||
|
||||
@@ -1,22 +1,21 @@
|
||||
from typing import List
|
||||
|
||||
from fastapi import APIRouter, HTTPException
|
||||
from models.recipe_models import SlugResponse
|
||||
from services.meal_services import MealPlan
|
||||
from utils.snackbar import SnackResponse
|
||||
|
||||
router = APIRouter()
|
||||
router = APIRouter(tags=["Meal Plan"])
|
||||
|
||||
|
||||
@router.get("/api/meal-plan/all/", tags=["Meal Plan"], response_model=List[MealPlan])
|
||||
async def get_all_meals():
|
||||
@router.get("/api/meal-plan/all/", response_model=List[MealPlan])
|
||||
def get_all_meals():
|
||||
""" Returns a list of all available Meal Plan """
|
||||
|
||||
return MealPlan.get_all()
|
||||
|
||||
|
||||
@router.post("/api/meal-plan/create/", tags=["Meal Plan"])
|
||||
async def set_meal_plan(data: MealPlan):
|
||||
@router.post("/api/meal-plan/create/")
|
||||
def set_meal_plan(data: MealPlan):
|
||||
""" Creates a meal plan database entry """
|
||||
data.process_meals()
|
||||
data.save_to_db()
|
||||
@@ -29,24 +28,25 @@ async def set_meal_plan(data: MealPlan):
|
||||
return SnackResponse.success("Mealplan Created")
|
||||
|
||||
|
||||
@router.post("/api/meal-plan/{plan_id}/update/", tags=["Meal Plan"])
|
||||
async def update_meal_plan(plan_id: str, meal_plan: MealPlan):
|
||||
@router.post("/api/meal-plan/{plan_id}/update/")
|
||||
def update_meal_plan(plan_id: str, meal_plan: MealPlan):
|
||||
""" Updates a meal plan based off ID """
|
||||
|
||||
try:
|
||||
meal_plan.process_meals()
|
||||
meal_plan.update(plan_id)
|
||||
except:
|
||||
raise HTTPException(
|
||||
status_code=404,
|
||||
detail=SnackResponse.error("Unable to Update Mealplan"),
|
||||
)
|
||||
meal_plan.process_meals()
|
||||
meal_plan.update(plan_id)
|
||||
# try:
|
||||
# meal_plan.process_meals()
|
||||
# meal_plan.update(plan_id)
|
||||
# except:
|
||||
# raise HTTPException(
|
||||
# status_code=404,
|
||||
# detail=SnackResponse.error("Unable to Update Mealplan"),
|
||||
# )
|
||||
|
||||
return SnackResponse.success("Mealplan Updated")
|
||||
|
||||
|
||||
@router.delete("/api/meal-plan/{plan_id}/delete/", tags=["Meal Plan"])
|
||||
async def delete_meal_plan(plan_id):
|
||||
@router.delete("/api/meal-plan/{plan_id}/delete/")
|
||||
def delete_meal_plan(plan_id):
|
||||
""" Removes a meal plan from the database """
|
||||
|
||||
MealPlan.delete(plan_id)
|
||||
@@ -58,7 +58,7 @@ async def delete_meal_plan(plan_id):
|
||||
"/api/meal-plan/today/",
|
||||
tags=["Meal Plan"],
|
||||
)
|
||||
async def get_today():
|
||||
def get_today():
|
||||
"""
|
||||
Returns the recipe slug for the meal scheduled for today.
|
||||
If no meal is scheduled nothing is returned
|
||||
@@ -67,8 +67,8 @@ async def get_today():
|
||||
return MealPlan.today()
|
||||
|
||||
|
||||
@router.get("/api/meal-plan/this-week/", tags=["Meal Plan"], response_model=MealPlan)
|
||||
async def get_this_week():
|
||||
@router.get("/api/meal-plan/this-week/", response_model=MealPlan)
|
||||
def get_this_week():
|
||||
""" Returns the meal plan data for this week """
|
||||
|
||||
return MealPlan.this_week()
|
||||
|
||||
@@ -4,15 +4,15 @@ from fastapi import APIRouter, File, HTTPException, UploadFile
|
||||
from models.migration_models import ChowdownURL
|
||||
from services.migrations.chowdown import chowdown_migrate as chowdow_migrate
|
||||
from services.migrations.nextcloud import migrate as nextcloud_migrate
|
||||
from settings import MIGRATION_DIR
|
||||
from app_config import MIGRATION_DIR
|
||||
from utils.snackbar import SnackResponse
|
||||
|
||||
router = APIRouter()
|
||||
router = APIRouter(tags=["Migration"])
|
||||
|
||||
|
||||
# Chowdown
|
||||
@router.post("/api/migration/chowdown/repo/", tags=["Migration"])
|
||||
async def import_chowdown_recipes(repo: ChowdownURL):
|
||||
@router.post("/api/migration/chowdown/repo/")
|
||||
def import_chowdown_recipes(repo: ChowdownURL):
|
||||
""" Import Chowsdown Recipes from Repo URL """
|
||||
try:
|
||||
report = chowdow_migrate(repo.url)
|
||||
@@ -30,8 +30,8 @@ async def import_chowdown_recipes(repo: ChowdownURL):
|
||||
|
||||
|
||||
# Nextcloud
|
||||
@router.get("/api/migration/nextcloud/available/", tags=["Migration"])
|
||||
async def get_avaiable_nextcloud_imports():
|
||||
@router.get("/api/migration/nextcloud/available/")
|
||||
def get_avaiable_nextcloud_imports():
|
||||
""" Returns a list of avaiable directories that can be imported into Mealie """
|
||||
available = []
|
||||
for dir in MIGRATION_DIR.iterdir():
|
||||
@@ -43,15 +43,15 @@ async def get_avaiable_nextcloud_imports():
|
||||
return available
|
||||
|
||||
|
||||
@router.post("/api/migration/nextcloud/{selection}/import/", tags=["Migration"])
|
||||
async def import_nextcloud_directory(selection: str):
|
||||
@router.post("/api/migration/nextcloud/{selection}/import/")
|
||||
def import_nextcloud_directory(selection: str):
|
||||
""" Imports all the recipes in a given directory """
|
||||
|
||||
return nextcloud_migrate(selection)
|
||||
|
||||
|
||||
@router.delete("/api/migration/{file_folder_name}/delete/", tags=["Migration"])
|
||||
async def delete_migration_data(file_folder_name: str):
|
||||
@router.delete("/api/migration/{file_folder_name}/delete/")
|
||||
def delete_migration_data(file_folder_name: str):
|
||||
""" Removes migration data from the file system """
|
||||
|
||||
remove_path = MIGRATION_DIR.joinpath(file_folder_name)
|
||||
@@ -66,8 +66,8 @@ async def delete_migration_data(file_folder_name: str):
|
||||
return SnackResponse.info(f"Migration Data Remove: {remove_path.absolute()}")
|
||||
|
||||
|
||||
@router.post("/api/migration/upload/", tags=["Migration"])
|
||||
async def upload_nextcloud_zipfile(archive: UploadFile = File(...)):
|
||||
@router.post("/api/migration/upload/")
|
||||
def upload_nextcloud_zipfile(archive: UploadFile = File(...)):
|
||||
""" Upload a .zip File to later be imported into Mealie """
|
||||
dest = MIGRATION_DIR.joinpath(archive.filename)
|
||||
|
||||
|
||||
@@ -8,13 +8,11 @@ from services.recipe_services import Recipe, read_requested_values
|
||||
from services.scrape_services import create_from_url
|
||||
from utils.snackbar import SnackResponse
|
||||
|
||||
router = APIRouter()
|
||||
router = APIRouter(tags=["Recipes"])
|
||||
|
||||
|
||||
@router.get("/api/all-recipes/", tags=["Recipes"], response_model=List[dict])
|
||||
async def get_all_recipes(
|
||||
keys: Optional[List[str]] = Query(...), num: Optional[int] = 100
|
||||
):
|
||||
@router.get("/api/all-recipes/", response_model=List[dict])
|
||||
def get_all_recipes(keys: Optional[List[str]] = Query(...), num: Optional[int] = 100):
|
||||
"""
|
||||
Returns key data for all recipes based off the query paramters provided.
|
||||
For example, if slug, image, and name are provided you will recieve a list of
|
||||
@@ -30,8 +28,8 @@ async def get_all_recipes(
|
||||
return all_recipes
|
||||
|
||||
|
||||
@router.post("/api/all-recipes/", tags=["Recipes"], response_model=List[dict])
|
||||
async def get_all_recipes_post(body: AllRecipeRequest):
|
||||
@router.post("/api/all-recipes/", response_model=List[dict])
|
||||
def get_all_recipes_post(body: AllRecipeRequest):
|
||||
"""
|
||||
Returns key data for all recipes based off the body data provided.
|
||||
For example, if slug, image, and name are provided you will recieve a list of
|
||||
@@ -46,16 +44,16 @@ async def get_all_recipes_post(body: AllRecipeRequest):
|
||||
return all_recipes
|
||||
|
||||
|
||||
@router.get("/api/recipe/{recipe_slug}/", tags=["Recipes"], response_model=Recipe)
|
||||
async def get_recipe(recipe_slug: str):
|
||||
@router.get("/api/recipe/{recipe_slug}/", response_model=Recipe)
|
||||
def get_recipe(recipe_slug: str):
|
||||
""" Takes in a recipe slug, returns all data for a recipe """
|
||||
recipe = Recipe.get_by_slug(recipe_slug)
|
||||
|
||||
return recipe
|
||||
|
||||
|
||||
@router.get("/api/recipe/image/{recipe_slug}/", tags=["Recipes"])
|
||||
async def get_recipe_img(recipe_slug: str):
|
||||
@router.get("/api/recipe/image/{recipe_slug}/")
|
||||
def get_recipe_img(recipe_slug: str):
|
||||
""" Takes in a recipe slug, returns the static image """
|
||||
recipe_image = read_image(recipe_slug)
|
||||
|
||||
@@ -69,7 +67,7 @@ async def get_recipe_img(recipe_slug: str):
|
||||
status_code=201,
|
||||
response_model=str,
|
||||
)
|
||||
async def parse_recipe_url(url: RecipeURLIn):
|
||||
def parse_recipe_url(url: RecipeURLIn):
|
||||
""" Takes in a URL and attempts to scrape data and load it into the database """
|
||||
|
||||
slug = create_from_url(url.url)
|
||||
@@ -77,35 +75,36 @@ async def parse_recipe_url(url: RecipeURLIn):
|
||||
return slug
|
||||
|
||||
|
||||
@router.post("/api/recipe/create/", tags=["Recipes"])
|
||||
async def create_from_json(data: Recipe) -> str:
|
||||
@router.post("/api/recipe/create/")
|
||||
def create_from_json(data: Recipe) -> str:
|
||||
""" Takes in a JSON string and loads data into the database as a new entry"""
|
||||
created_recipe = data.save_to_db()
|
||||
|
||||
return created_recipe
|
||||
|
||||
|
||||
@router.post("/api/recipe/{recipe_slug}/update/image/", tags=["Recipes"])
|
||||
@router.post("/api/recipe/{recipe_slug}/update/image/")
|
||||
def update_recipe_image(
|
||||
recipe_slug: str, image: bytes = File(...), extension: str = Form(...)
|
||||
):
|
||||
""" Removes an existing image and replaces it with the incoming file. """
|
||||
response = write_image(recipe_slug, image, extension)
|
||||
Recipe.update_image(recipe_slug, extension)
|
||||
|
||||
return response
|
||||
|
||||
|
||||
@router.post("/api/recipe/{recipe_slug}/update/", tags=["Recipes"])
|
||||
async def update_recipe(recipe_slug: str, data: Recipe):
|
||||
""" Updates a recipe by existing slug and data. Data should containt """
|
||||
@router.post("/api/recipe/{recipe_slug}/update/")
|
||||
def update_recipe(recipe_slug: str, data: Recipe):
|
||||
""" Updates a recipe by existing slug and data. """
|
||||
|
||||
data.update(recipe_slug)
|
||||
new_slug = data.update(recipe_slug)
|
||||
|
||||
return {"message": "PLACEHOLDER"}
|
||||
return new_slug
|
||||
|
||||
|
||||
@router.delete("/api/recipe/{recipe_slug}/delete/", tags=["Recipes"])
|
||||
async def delete_recipe(recipe_slug: str):
|
||||
@router.delete("/api/recipe/{recipe_slug}/delete/")
|
||||
def delete_recipe(recipe_slug: str):
|
||||
""" Deletes a recipe by slug """
|
||||
|
||||
try:
|
||||
|
||||
@@ -1,99 +1,91 @@
|
||||
from typing import List
|
||||
|
||||
from db.mongo_setup import global_init
|
||||
from fastapi import APIRouter, HTTPException
|
||||
from services.scheduler_services import Scheduler, post_webhooks
|
||||
from services.scheduler_services import post_webhooks
|
||||
from services.settings_services import SiteSettings, SiteTheme
|
||||
from utils.global_scheduler import scheduler
|
||||
from utils.snackbar import SnackResponse
|
||||
|
||||
router = APIRouter()
|
||||
global_init()
|
||||
|
||||
scheduler = Scheduler()
|
||||
scheduler.startup_scheduler()
|
||||
router = APIRouter(tags=["Settings"])
|
||||
|
||||
|
||||
@router.get("/api/site-settings/", tags=["Settings"])
|
||||
async def get_main_settings():
|
||||
@router.get("/api/site-settings/")
|
||||
def get_main_settings():
|
||||
""" Returns basic site settings """
|
||||
|
||||
return SiteSettings.get_site_settings()
|
||||
|
||||
|
||||
@router.post("/api/site-settings/webhooks/test/", tags=["Settings"])
|
||||
async def test_webhooks():
|
||||
@router.post("/api/site-settings/webhooks/test/")
|
||||
def test_webhooks():
|
||||
""" Run the function to test your webhooks """
|
||||
|
||||
return post_webhooks()
|
||||
|
||||
|
||||
@router.post("/api/site-settings/update/", tags=["Settings"])
|
||||
async def update_settings(data: SiteSettings):
|
||||
@router.post("/api/site-settings/update/")
|
||||
def update_settings(data: SiteSettings):
|
||||
""" Returns Site Settings """
|
||||
|
||||
try:
|
||||
data.update()
|
||||
except:
|
||||
raise HTTPException(
|
||||
status_code=400, detail=SnackResponse.error("Unable to Save Settings")
|
||||
)
|
||||
data.update()
|
||||
# try:
|
||||
# data.update()
|
||||
# except:
|
||||
# raise HTTPException(
|
||||
# status_code=400, detail=SnackResponse.error("Unable to Save Settings")
|
||||
# )
|
||||
|
||||
scheduler.reschedule_webhooks()
|
||||
return SnackResponse.success("Settings Updated")
|
||||
|
||||
|
||||
@router.get(
|
||||
"/api/site-settings/themes/", tags=["Themes"]
|
||||
)
|
||||
async def get_all_themes():
|
||||
@router.get("/api/site-settings/themes/", tags=["Themes"])
|
||||
def get_all_themes():
|
||||
""" Returns all site themes """
|
||||
|
||||
return SiteTheme.get_all()
|
||||
|
||||
|
||||
@router.get(
|
||||
"/api/site-settings/themes/{theme_name}/", tags=["Themes"]
|
||||
)
|
||||
async def get_single_theme(theme_name: str):
|
||||
@router.get("/api/site-settings/themes/{theme_name}/", tags=["Themes"])
|
||||
def get_single_theme(theme_name: str):
|
||||
""" Returns a named theme """
|
||||
return SiteTheme.get_by_name(theme_name)
|
||||
|
||||
|
||||
@router.post("/api/site-settings/themes/create/", tags=["Themes"])
|
||||
async def create_theme(data: SiteTheme):
|
||||
def create_theme(data: SiteTheme):
|
||||
""" Creates a site color theme database entry """
|
||||
|
||||
try:
|
||||
data.save_to_db()
|
||||
except:
|
||||
raise HTTPException(
|
||||
status_code=400, detail=SnackResponse.error("Unable to Save Theme")
|
||||
)
|
||||
data.save_to_db()
|
||||
# try:
|
||||
# data.save_to_db()
|
||||
# except:
|
||||
# raise HTTPException(
|
||||
# status_code=400, detail=SnackResponse.error("Unable to Save Theme")
|
||||
# )
|
||||
|
||||
return SnackResponse.success("Theme Saved")
|
||||
|
||||
|
||||
@router.post("/api/site-settings/themes/{theme_name}/update/", tags=["Themes"])
|
||||
async def update_theme(theme_name: str, data: SiteTheme):
|
||||
def update_theme(theme_name: str, data: SiteTheme):
|
||||
""" Update a theme database entry """
|
||||
try:
|
||||
data.update_document()
|
||||
except:
|
||||
raise HTTPException(
|
||||
status_code=400, detail=SnackResponse.error("Unable to Update Theme")
|
||||
)
|
||||
data.update_document()
|
||||
|
||||
# try:
|
||||
# except:
|
||||
# raise HTTPException(
|
||||
# status_code=400, detail=SnackResponse.error("Unable to Update Theme")
|
||||
# )
|
||||
|
||||
return SnackResponse.success("Theme Updated")
|
||||
|
||||
|
||||
@router.delete("/api/site-settings/themes/{theme_name}/delete/", tags=["Themes"])
|
||||
async def delete_theme(theme_name: str):
|
||||
def delete_theme(theme_name: str):
|
||||
""" Deletes theme from the database """
|
||||
try:
|
||||
SiteTheme.delete_theme(theme_name)
|
||||
except:
|
||||
raise HTTPException(
|
||||
status_code=400, detail=SnackResponse.error("Unable to Delete Theme")
|
||||
)
|
||||
SiteTheme.delete_theme(theme_name)
|
||||
# try:
|
||||
# SiteTheme.delete_theme(theme_name)
|
||||
# except:
|
||||
# raise HTTPException(
|
||||
# status_code=400, detail=SnackResponse.error("Unable to Delete Theme")
|
||||
# )
|
||||
|
||||
return SnackResponse.success("Theme Deleted")
|
||||
|
||||
@@ -6,19 +6,19 @@ from fastapi.responses import FileResponse
|
||||
CWD = Path(__file__).parent
|
||||
WEB_PATH = CWD.parent.joinpath("dist")
|
||||
BASE_HTML = WEB_PATH.joinpath("index.html")
|
||||
router = APIRouter()
|
||||
router = APIRouter(include_in_schema=False)
|
||||
|
||||
|
||||
@router.get("/favicon.ico", include_in_schema=False)
|
||||
@router.get("/favicon.ico")
|
||||
def facivon():
|
||||
return responses.RedirectResponse(url="/mealie/favicon.ico")
|
||||
|
||||
|
||||
@router.get("/", include_in_schema=False)
|
||||
@router.get("/")
|
||||
def root():
|
||||
return FileResponse(BASE_HTML)
|
||||
|
||||
|
||||
@router.get("/{full_path:path}", include_in_schema=False)
|
||||
@router.get("/{full_path:path}")
|
||||
def root_plus(full_path):
|
||||
return FileResponse(BASE_HTML)
|
||||
|
||||
@@ -1,149 +0,0 @@
|
||||
import json
|
||||
import shutil
|
||||
import zipfile
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
from db.recipe_models import RecipeDocument
|
||||
from jinja2 import Template
|
||||
from utils.logger import logger
|
||||
|
||||
from services.recipe_services import IMG_DIR
|
||||
|
||||
CWD = Path(__file__).parent
|
||||
BACKUP_DIR = CWD.parent.joinpath("data", "backups")
|
||||
TEMPLATE_DIR = CWD.parent.joinpath("data", "templates")
|
||||
TEMP_DIR = CWD.parent.joinpath("data", "temp")
|
||||
|
||||
|
||||
def auto_backup_job():
|
||||
for backup in BACKUP_DIR.glob("Auto*.zip"):
|
||||
backup.unlink()
|
||||
|
||||
templates = []
|
||||
for template in TEMPLATE_DIR.iterdir():
|
||||
templates.append(template)
|
||||
|
||||
export_db(tag="Auto", templates=templates)
|
||||
logger.info("Auto Backup Called")
|
||||
|
||||
|
||||
def import_migration(recipe_dict: dict) -> dict:
|
||||
del recipe_dict["_id"]
|
||||
del recipe_dict["dateAdded"]
|
||||
|
||||
# Migration from list to Object Type Data
|
||||
if type(recipe_dict["extras"]) == list:
|
||||
recipe_dict["extras"] = {}
|
||||
|
||||
return recipe_dict
|
||||
|
||||
|
||||
def import_from_archive(file_name: str) -> list:
|
||||
successful_imports = []
|
||||
failed_imports = []
|
||||
|
||||
file_path = BACKUP_DIR.joinpath(file_name)
|
||||
|
||||
with zipfile.ZipFile(file_path, "r") as zip_ref:
|
||||
zip_ref.extractall(TEMP_DIR)
|
||||
|
||||
recipe_dir = TEMP_DIR.joinpath("recipes")
|
||||
for recipe in recipe_dir.glob("*.json"):
|
||||
with open(recipe, "r") as f:
|
||||
recipe_dict = json.loads(f.read())
|
||||
|
||||
try:
|
||||
recipe_dict = import_migration(recipe_dict)
|
||||
recipeDoc = RecipeDocument(**recipe_dict)
|
||||
recipeDoc.save()
|
||||
successful_imports.append(recipe.stem)
|
||||
except:
|
||||
logger.info(f"Failed Import: {recipe.stem}")
|
||||
failed_imports.append(recipe.stem)
|
||||
|
||||
image_dir = TEMP_DIR.joinpath("images")
|
||||
for image in image_dir.iterdir():
|
||||
if image.stem in successful_imports:
|
||||
shutil.copy(image, IMG_DIR)
|
||||
|
||||
shutil.rmtree(TEMP_DIR)
|
||||
|
||||
return {"successful": successful_imports, "failed": failed_imports}
|
||||
|
||||
|
||||
def export_db(tag=None, templates=None):
|
||||
if tag:
|
||||
export_tag = tag + "_" + datetime.now().strftime("%Y-%b-%d")
|
||||
else:
|
||||
export_tag = datetime.now().strftime("%Y-%b-%d")
|
||||
|
||||
backup_folder = TEMP_DIR.joinpath(export_tag)
|
||||
backup_folder.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
img_folder = backup_folder.joinpath("images")
|
||||
img_folder.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
recipe_folder = backup_folder.joinpath("recipes")
|
||||
recipe_folder.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
export_images(img_folder)
|
||||
|
||||
if type(templates) == list:
|
||||
for template in templates:
|
||||
export_recipes(recipe_folder, template)
|
||||
elif type(templates) == str:
|
||||
export_recipes(recipe_folder, templates)
|
||||
else:
|
||||
export_recipes(recipe_folder)
|
||||
|
||||
zip_path = BACKUP_DIR.joinpath(f"{export_tag}")
|
||||
shutil.make_archive(zip_path, "zip", backup_folder)
|
||||
|
||||
shutil.rmtree(backup_folder)
|
||||
shutil.rmtree(TEMP_DIR)
|
||||
|
||||
return str(zip_path.absolute()) + ".zip"
|
||||
|
||||
|
||||
def export_images(dest_dir) -> Path:
|
||||
for file in IMG_DIR.iterdir():
|
||||
shutil.copy(file, dest_dir.joinpath(file.name))
|
||||
|
||||
|
||||
def export_recipes(dest_dir: Path, template=None) -> Path:
|
||||
all_recipes = RecipeDocument.objects()
|
||||
logger.info(f"Backing Up Recipes: {all_recipes}")
|
||||
for recipe in all_recipes:
|
||||
json_recipe = recipe.to_json(indent=4)
|
||||
|
||||
if template:
|
||||
md_dest = dest_dir.parent.joinpath("templates")
|
||||
md_dest.mkdir(parents=True, exist_ok=True)
|
||||
template = TEMPLATE_DIR.joinpath(template)
|
||||
export_markdown(md_dest, json_recipe, template)
|
||||
|
||||
filename = recipe.slug + ".json"
|
||||
file_path = dest_dir.joinpath(filename)
|
||||
|
||||
with open(file_path, "w") as f:
|
||||
f.write(json_recipe)
|
||||
|
||||
|
||||
def export_markdown(dest_dir: Path, recipe_data: json, template=Path) -> Path:
|
||||
recipe_data: dict = json.loads(recipe_data)
|
||||
recipe_template = TEMPLATE_DIR.joinpath("recipes.md")
|
||||
|
||||
with open(recipe_template, "r") as f:
|
||||
template = Template(f.read())
|
||||
|
||||
out_file = dest_dir.joinpath(recipe_data["slug"] + ".md")
|
||||
|
||||
content = template.render(recipe=recipe_data)
|
||||
|
||||
with open(out_file, "w") as f:
|
||||
f.write(content)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
pass
|
||||
147
mealie/services/backups/exports.py
Normal file
@@ -0,0 +1,147 @@
|
||||
import json
|
||||
import shutil
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
from jinja2 import Template
|
||||
from services.meal_services import MealPlan
|
||||
from services.recipe_services import Recipe
|
||||
from services.settings_services import SiteSettings, SiteTheme
|
||||
from app_config import BACKUP_DIR, IMG_DIR, TEMP_DIR, TEMPLATE_DIR
|
||||
from utils.logger import logger
|
||||
|
||||
|
||||
class ExportDatabase:
|
||||
def __init__(self, tag=None, templates=None) -> None:
|
||||
"""Export a Mealie database. Export interacts directly with class objects and can be used
|
||||
with any supported backend database platform. By default tags are timestands, and no Jinja2 templates are rendered
|
||||
|
||||
|
||||
Args:
|
||||
tag ([str], optional): A str to be used as a file tag. Defaults to None.
|
||||
templates (list, optional): A list of template file names. Defaults to None.
|
||||
"""
|
||||
if tag:
|
||||
export_tag = tag + "_" + datetime.now().strftime("%Y-%b-%d")
|
||||
else:
|
||||
export_tag = datetime.now().strftime("%Y-%b-%d")
|
||||
|
||||
self.main_dir = TEMP_DIR.joinpath(export_tag)
|
||||
self.img_dir = self.main_dir.joinpath("images")
|
||||
self.recipe_dir = self.main_dir.joinpath("recipes")
|
||||
self.themes_dir = self.main_dir.joinpath("themes")
|
||||
self.settings_dir = self.main_dir.joinpath("settings")
|
||||
self.templates_dir = self.main_dir.joinpath("templates")
|
||||
self.mealplans_dir = self.main_dir.joinpath("mealplans")
|
||||
|
||||
try:
|
||||
self.templates = [TEMPLATE_DIR.joinpath(x) for x in templates]
|
||||
except:
|
||||
self.templates = False
|
||||
logger.info("No Jinja2 Templates Registered for Export")
|
||||
|
||||
required_dirs = [
|
||||
self.main_dir,
|
||||
self.img_dir,
|
||||
self.recipe_dir,
|
||||
self.themes_dir,
|
||||
self.settings_dir,
|
||||
self.templates_dir,
|
||||
self.mealplans_dir,
|
||||
]
|
||||
|
||||
for dir in required_dirs:
|
||||
dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
def export_recipes(self):
|
||||
all_recipes = Recipe.get_all()
|
||||
|
||||
for recipe in all_recipes:
|
||||
logger.info(f"Backing Up Recipes: {recipe}")
|
||||
|
||||
filename = recipe.get("slug") + ".json"
|
||||
file_path = self.recipe_dir.joinpath(filename)
|
||||
|
||||
ExportDatabase._write_json_file(recipe, file_path)
|
||||
|
||||
if self.templates:
|
||||
self._export_template(recipe)
|
||||
|
||||
def _export_template(self, recipe_data: dict):
|
||||
for template_path in self.templates:
|
||||
|
||||
with open(template_path, "r") as f:
|
||||
template = Template(f.read())
|
||||
|
||||
filename = recipe_data.get("name") + template_path.suffix
|
||||
out_file = self.templates_dir.joinpath(filename)
|
||||
|
||||
content = template.render(recipe=recipe_data)
|
||||
|
||||
with open(out_file, "w") as f:
|
||||
f.write(content)
|
||||
|
||||
def export_images(self):
|
||||
for file in IMG_DIR.iterdir():
|
||||
shutil.copy(file, self.img_dir.joinpath(file.name))
|
||||
|
||||
def export_settings(self):
|
||||
all_settings = SiteSettings.get_site_settings()
|
||||
out_file = self.settings_dir.joinpath("settings.json")
|
||||
ExportDatabase._write_json_file(all_settings.dict(), out_file)
|
||||
|
||||
def export_themes(self):
|
||||
all_themes = SiteTheme.get_all()
|
||||
if all_themes:
|
||||
all_themes = [x.dict() for x in all_themes]
|
||||
out_file = self.themes_dir.joinpath("themes.json")
|
||||
ExportDatabase._write_json_file(all_themes, out_file)
|
||||
|
||||
def export_meals(
|
||||
self,
|
||||
): #! Problem Parseing Datetime Objects... May come back to this
|
||||
meal_plans = MealPlan.get_all()
|
||||
if meal_plans:
|
||||
meal_plans = [x.dict() for x in meal_plans]
|
||||
|
||||
out_file = self.mealplans_dir.joinpath("mealplans.json")
|
||||
ExportDatabase._write_json_file(meal_plans, out_file)
|
||||
|
||||
@staticmethod
|
||||
def _write_json_file(data, out_file: Path):
|
||||
json_data = json.dumps(data, indent=4, default=str)
|
||||
|
||||
with open(out_file, "w") as f:
|
||||
f.write(json_data)
|
||||
|
||||
def finish_export(self):
|
||||
zip_path = BACKUP_DIR.joinpath(f"{self.main_dir.name}")
|
||||
shutil.make_archive(zip_path, "zip", self.main_dir)
|
||||
|
||||
shutil.rmtree(TEMP_DIR)
|
||||
|
||||
return str(zip_path.absolute()) + ".zip"
|
||||
|
||||
|
||||
def backup_all(tag=None, templates=None):
|
||||
db_export = ExportDatabase(tag=tag, templates=templates)
|
||||
|
||||
db_export.export_recipes()
|
||||
db_export.export_images()
|
||||
db_export.export_settings()
|
||||
db_export.export_themes()
|
||||
db_export.export_meals()
|
||||
#
|
||||
return db_export.finish_export()
|
||||
|
||||
|
||||
def auto_backup_job():
|
||||
for backup in BACKUP_DIR.glob("Auto*.zip"):
|
||||
backup.unlink()
|
||||
|
||||
templates = []
|
||||
for template in TEMPLATE_DIR.iterdir():
|
||||
templates.append(template)
|
||||
|
||||
backup_all(tag="Auto", templates=templates)
|
||||
logger.info("Auto Backup Called")
|
||||
132
mealie/services/backups/imports.py
Normal file
@@ -0,0 +1,132 @@
|
||||
import json
|
||||
import shutil
|
||||
import zipfile
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
|
||||
from app_config import BACKUP_DIR, IMG_DIR, TEMP_DIR
|
||||
from services.recipe_services import Recipe
|
||||
from services.settings_services import SiteSettings, SiteTheme
|
||||
from utils.logger import logger
|
||||
|
||||
|
||||
class ImportDatabase:
|
||||
def __init__(
|
||||
self,
|
||||
zip_archive: str,
|
||||
import_recipes: bool = True,
|
||||
import_settings: bool = True,
|
||||
import_themes: bool = True,
|
||||
force_import: bool = False,
|
||||
rebase: bool = False,
|
||||
) -> None:
|
||||
"""Import a database.zip file exported from mealie.
|
||||
|
||||
Args:
|
||||
zip_archive (str): The filename contained in the backups directory
|
||||
import_recipes (bool, optional): Import Recipes?. Defaults to True.
|
||||
import_settings (bool, optional): Determines if settings are imported. Defaults to True.
|
||||
import_themes (bool, optional): Determines if themes are imported. Defaults to True.
|
||||
force_import (bool, optional): Force import will update all existing recipes. If False existing recipes are skipped. Defaults to False.
|
||||
rebase (bool, optional): Rebase will first clear the database and then import Recipes. Defaults to False.
|
||||
|
||||
Raises:
|
||||
Exception: If the zip file does not exists an exception raise.
|
||||
"""
|
||||
|
||||
self.archive = BACKUP_DIR.joinpath(zip_archive)
|
||||
self.imp_recipes = import_recipes
|
||||
self.imp_settings = import_settings
|
||||
self.imp_themes = import_themes
|
||||
self.force_imports = force_import
|
||||
self.force_rebase = rebase
|
||||
|
||||
if self.archive.is_file():
|
||||
self.import_dir = TEMP_DIR.joinpath("active_import")
|
||||
self.import_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
with zipfile.ZipFile(self.archive, "r") as zip_ref:
|
||||
zip_ref.extractall(self.import_dir)
|
||||
pass
|
||||
else:
|
||||
raise Exception("Import file does not exist")
|
||||
|
||||
def run(self):
|
||||
if self.imp_recipes:
|
||||
report = self.import_recipes()
|
||||
if self.imp_settings:
|
||||
self.import_settings()
|
||||
if self.imp_themes:
|
||||
self.import_themes()
|
||||
|
||||
self.clean_up()
|
||||
|
||||
return report if report else None
|
||||
|
||||
def import_recipes(self):
|
||||
recipe_dir: Path = self.import_dir.joinpath("recipes")
|
||||
|
||||
successful_imports = []
|
||||
failed_imports = []
|
||||
|
||||
for recipe in recipe_dir.glob("*.json"):
|
||||
with open(recipe, "r") as f:
|
||||
recipe_dict = json.loads(f.read())
|
||||
recipe_dict = ImportDatabase._recipe_migration(recipe_dict)
|
||||
try:
|
||||
recipe_obj = Recipe(**recipe_dict)
|
||||
recipe_obj.save_to_db()
|
||||
successful_imports.append(recipe.stem)
|
||||
logger.info(f"Imported: {recipe.stem}")
|
||||
except:
|
||||
logger.info(f"Failed Import: {recipe.stem}")
|
||||
failed_imports.append(recipe.stem)
|
||||
|
||||
self._import_images(successful_imports)
|
||||
|
||||
return {"successful": successful_imports, "failed": failed_imports}
|
||||
|
||||
@staticmethod
|
||||
def _recipe_migration(recipe_dict: dict) -> dict:
|
||||
try:
|
||||
del recipe_dict["_id"]
|
||||
del recipe_dict["dateAdded"]
|
||||
except:
|
||||
logger.info("Detected new backup Schema, skipping migration...")
|
||||
return recipe_dict
|
||||
# Migration from list to Object Type Data
|
||||
if type(recipe_dict["extras"]) == list:
|
||||
recipe_dict["extras"] = {}
|
||||
|
||||
return recipe_dict
|
||||
|
||||
def _import_images(self, successful_imports: List[str]):
|
||||
image_dir = self.import_dir.joinpath("images")
|
||||
for image in image_dir.iterdir():
|
||||
if image.stem in successful_imports:
|
||||
shutil.copy(image, IMG_DIR)
|
||||
|
||||
def import_themes(self):
|
||||
themes_file = self.import_dir.joinpath("themes", "themes.json")
|
||||
|
||||
with open(themes_file, "r") as f:
|
||||
themes: list = json.loads(f.read())
|
||||
for theme in themes:
|
||||
new_theme = SiteTheme(**theme)
|
||||
try:
|
||||
new_theme.save_to_db()
|
||||
except:
|
||||
logger.info(f"Unable Import Theme {new_theme.name}")
|
||||
|
||||
def import_settings(self):
|
||||
settings_file = self.import_dir.joinpath("settings", "settings.json")
|
||||
|
||||
with open(settings_file, "r") as f:
|
||||
settings: dict = json.loads(f.read())
|
||||
|
||||
settings = SiteSettings(**settings)
|
||||
|
||||
settings.update()
|
||||
|
||||
def clean_up(self):
|
||||
shutil.rmtree(TEMP_DIR)
|
||||
@@ -2,13 +2,12 @@ import shutil
|
||||
from pathlib import Path
|
||||
|
||||
import requests
|
||||
from fastapi.responses import FileResponse
|
||||
|
||||
CWD = Path(__file__).parent
|
||||
IMG_DIR = CWD.parent.joinpath("data", "img")
|
||||
|
||||
|
||||
def read_image(recipe_slug: str) -> FileResponse:
|
||||
def read_image(recipe_slug: str) -> Path:
|
||||
if IMG_DIR.joinpath(recipe_slug).is_file():
|
||||
return IMG_DIR.joinpath(recipe_slug)
|
||||
else:
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
import json
|
||||
from datetime import date, timedelta
|
||||
from pathlib import Path
|
||||
from typing import List, Optional
|
||||
|
||||
from db.meal_models import MealDocument, MealPlanDocument
|
||||
from db.database import db
|
||||
from pydantic import BaseModel
|
||||
|
||||
from services.recipe_services import Recipe
|
||||
@@ -80,73 +79,30 @@ class MealPlan(BaseModel):
|
||||
self.meals = meals
|
||||
|
||||
def save_to_db(self):
|
||||
meal_docs = []
|
||||
for meal in self.meals:
|
||||
meal = meal.dict()
|
||||
meal_doc = MealDocument(**meal)
|
||||
meal_docs.append(meal_doc)
|
||||
|
||||
self.meals = meal_docs
|
||||
|
||||
meal_plan = MealPlanDocument(**self.dict())
|
||||
|
||||
meal_plan.save()
|
||||
db.meals.save_new(self.dict())
|
||||
|
||||
@staticmethod
|
||||
def get_all() -> List:
|
||||
all_meals = []
|
||||
for plan in MealPlanDocument.objects.order_by("startDate"):
|
||||
all_meals.append(MealPlan._unpack_doc(plan))
|
||||
|
||||
print(all_meals)
|
||||
all_meals = [MealPlan(**x) for x in db.meals.get_all(order_by="startDate")]
|
||||
|
||||
return all_meals
|
||||
|
||||
def update(self, uid):
|
||||
document = MealPlanDocument.objects.get(uid=uid)
|
||||
|
||||
meal_docs = []
|
||||
for meal in self.meals:
|
||||
meal = meal.dict()
|
||||
meal_doc = MealDocument(**meal)
|
||||
meal_docs.append(meal_doc)
|
||||
|
||||
self.meals = meal_docs
|
||||
if document:
|
||||
document.update(set__meals=self.meals)
|
||||
document.save()
|
||||
db.meals.update(uid, self.dict())
|
||||
|
||||
@staticmethod
|
||||
def delete(uid):
|
||||
document = MealPlanDocument.objects.get(uid=uid)
|
||||
|
||||
if document:
|
||||
document.delete()
|
||||
|
||||
@staticmethod
|
||||
def _unpack_doc(document: MealPlanDocument):
|
||||
meal_plan = json.loads(document.to_json())
|
||||
del meal_plan["_id"]["$oid"]
|
||||
print(meal_plan)
|
||||
meal_plan["uid"] = meal_plan["uid"]["$uuid"]
|
||||
|
||||
meal_plan["startDate"] = meal_plan["startDate"]["$date"]
|
||||
meal_plan["endDate"] = meal_plan["endDate"]["$date"]
|
||||
|
||||
meals = []
|
||||
for meal in meal_plan["meals"]:
|
||||
meal["date"] = meal["date"]["$date"]
|
||||
meals.append(Meal(**meal))
|
||||
|
||||
meal_plan["meals"] = meals
|
||||
return MealPlan(**meal_plan)
|
||||
db.meals.delete(uid)
|
||||
|
||||
@staticmethod
|
||||
def today() -> str:
|
||||
""" Returns the meal slug for Today """
|
||||
meal_plan = MealPlanDocument.objects.order_by("startDate").limit(1)
|
||||
meal_plan = MealPlan._unpack_doc(meal_plan[0])
|
||||
meal_plan = db.meals.get_all(limit=1, order_by="startDate")
|
||||
|
||||
for meal in meal_plan.meals:
|
||||
meal_docs = [Meal(**meal) for meal in meal_plan["meals"]]
|
||||
|
||||
for meal in meal_docs:
|
||||
if meal.date == date.today():
|
||||
return meal.slug
|
||||
|
||||
@@ -154,7 +110,6 @@ class MealPlan(BaseModel):
|
||||
|
||||
@staticmethod
|
||||
def this_week():
|
||||
meal_plan = MealPlanDocument.objects.order_by("startDate").limit(1)
|
||||
meal_plan = MealPlan._unpack_doc(meal_plan[0])
|
||||
meal_plan = db.meals.get_all(limit=1, order_by="startDate")
|
||||
|
||||
return meal_plan
|
||||
|
||||
@@ -3,8 +3,8 @@ from pathlib import Path
|
||||
|
||||
import git
|
||||
import yaml
|
||||
from services.image_services import IMG_DIR
|
||||
from services.recipe_services import Recipe
|
||||
from app_config import IMG_DIR
|
||||
|
||||
try:
|
||||
from yaml import CLoader as Loader
|
||||
|
||||
@@ -4,11 +4,11 @@ import shutil
|
||||
import zipfile
|
||||
from pathlib import Path
|
||||
|
||||
from services.recipe_services import IMG_DIR, Recipe
|
||||
from services.recipe_services import Recipe
|
||||
from services.scrape_services import normalize_data, process_recipe_data
|
||||
from app_config import IMG_DIR, TEMP_DIR
|
||||
|
||||
CWD = Path(__file__).parent
|
||||
TEMP_DIR = CWD.parent.parent.joinpath("data", "temp")
|
||||
MIGRTAION_DIR = CWD.parent.parent.joinpath("data", "migration")
|
||||
|
||||
|
||||
|
||||
@@ -3,16 +3,12 @@ import json
|
||||
from pathlib import Path
|
||||
from typing import Any, List, Optional
|
||||
|
||||
from db.recipe_models import RecipeDocument
|
||||
from db.database import db
|
||||
from pydantic import BaseModel, validator
|
||||
from slugify import slugify
|
||||
|
||||
from services.image_services import delete_image
|
||||
|
||||
CWD = Path(__file__).parent
|
||||
ALL_RECIPES = CWD.parent.joinpath("data", "all_recipes.json")
|
||||
IMG_DIR = CWD.parent.joinpath("data", "img")
|
||||
|
||||
|
||||
class RecipeNote(BaseModel):
|
||||
title: str
|
||||
@@ -31,7 +27,10 @@ class Recipe(BaseModel):
|
||||
recipeYield: Optional[str]
|
||||
recipeIngredient: Optional[list]
|
||||
recipeInstructions: Optional[list]
|
||||
totalTime: Optional[Any]
|
||||
|
||||
totalTime: Optional[str] = None
|
||||
prepTime: Optional[str] = None
|
||||
performTime: Optional[str] = None
|
||||
|
||||
# Mealie Specific
|
||||
slug: Optional[str] = ""
|
||||
@@ -67,9 +66,7 @@ class Recipe(BaseModel):
|
||||
"notes": [{"title": "Watch Out!", "text": "Prep the day before!"}],
|
||||
"orgURL": "https://www.bonappetit.com/recipe/chicken-and-rice-with-leeks-and-salsa-verde",
|
||||
"rating": 3,
|
||||
"extras": {
|
||||
"message": "Don't forget to defrost the chicken!"
|
||||
}
|
||||
"extras": {"message": "Don't forget to defrost the chicken!"},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -94,12 +91,12 @@ class Recipe(BaseModel):
|
||||
return cls(**document)
|
||||
|
||||
@classmethod
|
||||
def get_by_slug(_cls, slug: str):
|
||||
""" Returns a recipe dictionary from the slug """
|
||||
def get_by_slug(cls, slug: str):
|
||||
""" Returns a Recipe Object by Slug """
|
||||
|
||||
document = RecipeDocument.objects.get(slug=slug)
|
||||
document = db.recipes.get(slug, "slug")
|
||||
|
||||
return Recipe._unpack_doc(document)
|
||||
return cls(**document)
|
||||
|
||||
def save_to_db(self) -> str:
|
||||
recipe_dict = self.dict()
|
||||
@@ -110,47 +107,36 @@ class Recipe(BaseModel):
|
||||
except:
|
||||
recipe_dict["image"] = "no image"
|
||||
|
||||
try:
|
||||
total_time = recipe_dict.get("totalTime")
|
||||
recipe_dict["totalTime"] = str(total_time)
|
||||
except:
|
||||
pass
|
||||
# try:
|
||||
# total_time = recipe_dict.get("totalTime")
|
||||
# recipe_dict["totalTime"] = str(total_time)
|
||||
# except:
|
||||
# pass
|
||||
|
||||
recipeDoc = RecipeDocument(**recipe_dict)
|
||||
recipeDoc.save()
|
||||
recipe_doc = db.recipes.save_new(recipe_dict)
|
||||
recipe = Recipe(**recipe_doc)
|
||||
|
||||
return recipeDoc.slug
|
||||
return recipe.slug
|
||||
|
||||
@staticmethod
|
||||
def delete(recipe_slug: str) -> str:
|
||||
""" Removes the recipe from the database by slug """
|
||||
delete_image(recipe_slug)
|
||||
document = RecipeDocument.objects.get(slug=recipe_slug)
|
||||
|
||||
if document:
|
||||
document.delete()
|
||||
return "Document Deleted"
|
||||
db.recipes.delete(recipe_slug)
|
||||
return "Document Deleted"
|
||||
|
||||
def update(self, recipe_slug: str):
|
||||
""" Updates the recipe from the database by slug"""
|
||||
document = RecipeDocument.objects.get(slug=recipe_slug)
|
||||
updated_slug = db.recipes.update(recipe_slug, self.dict())
|
||||
return updated_slug.get("slug")
|
||||
|
||||
if document:
|
||||
document.update(set__name=self.name)
|
||||
document.update(set__description=self.description)
|
||||
document.update(set__image=self.image)
|
||||
document.update(set__recipeYield=self.recipeYield)
|
||||
document.update(set__recipeIngredient=self.recipeIngredient)
|
||||
document.update(set__recipeInstructions=self.recipeInstructions)
|
||||
document.update(set__totalTime=self.totalTime)
|
||||
@staticmethod
|
||||
def update_image(slug: str, extension: str):
|
||||
db.recipes.update_image(slug, extension)
|
||||
|
||||
document.update(set__categories=self.categories)
|
||||
document.update(set__tags=self.tags)
|
||||
document.update(set__notes=self.notes)
|
||||
document.update(set__orgURL=self.orgURL)
|
||||
document.update(set__rating=self.rating)
|
||||
document.update(set__extras=self.extras)
|
||||
document.save()
|
||||
@staticmethod
|
||||
def get_all():
|
||||
return db.recipes.get_all()
|
||||
|
||||
|
||||
def read_requested_values(keys: list, max_results: int = 0) -> List[dict]:
|
||||
@@ -166,7 +152,7 @@ def read_requested_values(keys: list, max_results: int = 0) -> List[dict]:
|
||||
|
||||
"""
|
||||
recipe_list = []
|
||||
for recipe in RecipeDocument.objects.order_by("dateAdded").limit(max_results):
|
||||
for recipe in db.recipes.get_all(limit=max_results, order_by="dateAdded"):
|
||||
recipe_details = {}
|
||||
for key in keys:
|
||||
try:
|
||||
|
||||
@@ -5,7 +5,7 @@ import requests
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from utils.logger import logger
|
||||
|
||||
from services.backup_services import auto_backup_job
|
||||
from services.backups.exports import auto_backup_job
|
||||
from services.meal_services import MealPlan
|
||||
from services.recipe_services import Recipe
|
||||
from services.settings_services import SiteSettings
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
import json
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
from typing import List, Tuple
|
||||
|
||||
from scrape_schema_recipe import scrape_url
|
||||
import extruct
|
||||
import requests
|
||||
import scrape_schema_recipe
|
||||
from slugify import slugify
|
||||
from utils.logger import logger
|
||||
from w3lib.html import get_base_url
|
||||
|
||||
from services.image_services import scrape_image
|
||||
from services.recipe_services import Recipe
|
||||
@@ -59,21 +62,10 @@ def normalize_data(recipe_data: dict) -> dict:
|
||||
recipe_data["recipeInstructions"] = normalize_instructions(
|
||||
recipe_data["recipeInstructions"]
|
||||
)
|
||||
recipe_data["image"] = normalize_image_url(recipe_data["image"])
|
||||
return recipe_data
|
||||
|
||||
|
||||
def create_from_url(url: str) -> dict:
|
||||
recipe_data = process_recipe_url(url)
|
||||
|
||||
with open(TEMP_FILE, "w") as f:
|
||||
f.write(json.dumps(recipe_data, indent=4, default=str))
|
||||
|
||||
recipe_data = normalize_data(recipe_data)
|
||||
recipe = Recipe(**recipe_data)
|
||||
|
||||
return recipe.save_to_db()
|
||||
|
||||
|
||||
def process_recipe_data(new_recipe: dict, url=None) -> dict:
|
||||
slug = slugify(new_recipe["name"])
|
||||
mealie_tags = {
|
||||
@@ -91,21 +83,84 @@ def process_recipe_data(new_recipe: dict, url=None) -> dict:
|
||||
return new_recipe
|
||||
|
||||
|
||||
def process_recipe_url(url: str) -> dict:
|
||||
new_recipe: dict = scrape_url(url, python_objects=True)[0]
|
||||
logger.info(f"Recipe Scraped From Web: {new_recipe}")
|
||||
def extract_recipe_from_html(html: str, url: str) -> dict:
|
||||
scraped_recipes: List[dict] = scrape_schema_recipe.loads(html, python_objects=True)
|
||||
|
||||
if not new_recipe:
|
||||
return "fail" # TODO: Return Better Error Here
|
||||
|
||||
new_recipe = process_recipe_data(new_recipe, url)
|
||||
|
||||
try:
|
||||
img_path = scrape_image(
|
||||
normalize_image_url(new_recipe.get("image")), new_recipe.get("slug")
|
||||
if not scraped_recipes:
|
||||
scraped_recipes: List[dict] = scrape_schema_recipe.scrape_url(
|
||||
url, python_objects=True
|
||||
)
|
||||
new_recipe["image"] = img_path.name
|
||||
except:
|
||||
new_recipe["image"] = None
|
||||
|
||||
if scraped_recipes:
|
||||
new_recipe: dict = scraped_recipes[0]
|
||||
logger.info(f"Recipe Scraped From Web: {new_recipe}")
|
||||
|
||||
if not new_recipe:
|
||||
return "fail" # TODO: Return Better Error Here
|
||||
|
||||
new_recipe = process_recipe_data(new_recipe, url=url)
|
||||
new_recipe = normalize_data(new_recipe)
|
||||
else:
|
||||
new_recipe = basic_recipe_from_opengraph(html, url)
|
||||
logger.info(f"Recipe Scraped from opengraph metadata: {new_recipe}")
|
||||
|
||||
return new_recipe
|
||||
|
||||
|
||||
def download_image_for_recipe(recipe: dict) -> dict:
|
||||
try:
|
||||
img_path = scrape_image(recipe.get("image"), recipe.get("slug"))
|
||||
recipe["image"] = img_path.name
|
||||
except:
|
||||
recipe["image"] = None
|
||||
|
||||
return recipe
|
||||
|
||||
|
||||
def og_field(properties: dict, field_name: str) -> str:
|
||||
return next((val for name, val in properties if name == field_name), None)
|
||||
|
||||
|
||||
def og_fields(properties: List[Tuple[str, str]], field_name: str) -> List[str]:
|
||||
return list({val for name, val in properties if name == field_name})
|
||||
|
||||
|
||||
def basic_recipe_from_opengraph(html: str, url: str) -> dict:
|
||||
base_url = get_base_url(html, url)
|
||||
data = extruct.extract(html, base_url=base_url)
|
||||
properties = data["opengraph"][0]["properties"]
|
||||
return {
|
||||
"name": og_field(properties, "og:title"),
|
||||
"description": og_field(properties, "og:description"),
|
||||
"image": og_field(properties, "og:image"),
|
||||
"recipeYield": "",
|
||||
# FIXME: If recipeIngredient is an empty list, mongodb's data verification fails.
|
||||
"recipeIngredient": ["Could not detect ingredients"],
|
||||
# FIXME: recipeInstructions is allowed to be empty but message this is added for user sanity.
|
||||
"recipeInstructions": [{"text": "Could not detect instructions"}],
|
||||
"slug": slugify(og_field(properties, "og:title")),
|
||||
"orgURL": og_field(properties, "og:url"),
|
||||
"categories": [],
|
||||
"tags": og_fields(properties, "og:article:tag"),
|
||||
"dateAdded": None,
|
||||
"notes": [],
|
||||
"extras": [],
|
||||
}
|
||||
|
||||
|
||||
def process_recipe_url(url: str) -> dict:
|
||||
r = requests.get(url)
|
||||
new_recipe = extract_recipe_from_html(r.text, url)
|
||||
new_recipe = download_image_for_recipe(new_recipe)
|
||||
return new_recipe
|
||||
|
||||
|
||||
def create_from_url(url: str) -> dict:
|
||||
recipe_data = process_recipe_url(url)
|
||||
|
||||
with open(TEMP_FILE, "w") as f:
|
||||
f.write(json.dumps(recipe_data, indent=4, default=str))
|
||||
|
||||
recipe = Recipe(**recipe_data)
|
||||
|
||||
return recipe.save_to_db()
|
||||
|
||||
@@ -1,23 +1,15 @@
|
||||
import json
|
||||
from typing import List, Optional
|
||||
|
||||
from db.settings_models import (
|
||||
SiteSettingsDocument,
|
||||
SiteThemeDocument,
|
||||
ThemeColorsDocument,
|
||||
WebhooksDocument,
|
||||
)
|
||||
from db.database import db
|
||||
from db.db_setup import sql_exists
|
||||
from pydantic import BaseModel
|
||||
from utils.logger import logger
|
||||
|
||||
|
||||
class Webhooks(BaseModel):
|
||||
webhookTime: str
|
||||
webhookURLs: Optional[List[str]]
|
||||
enabled: bool
|
||||
|
||||
@staticmethod
|
||||
def run():
|
||||
pass
|
||||
webhookTime: str = "00:00"
|
||||
webhookURLs: Optional[List[str]] = []
|
||||
enabled: bool = False
|
||||
|
||||
|
||||
class SiteSettings(BaseModel):
|
||||
@@ -37,30 +29,22 @@ class SiteSettings(BaseModel):
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def _unpack_doc(document: SiteSettingsDocument):
|
||||
document = json.loads(document.to_json())
|
||||
del document["_id"]
|
||||
document["webhhooks"] = Webhooks(**document["webhooks"])
|
||||
return SiteSettings(**document)
|
||||
def get_all():
|
||||
db.settings.get_all()
|
||||
|
||||
@staticmethod
|
||||
def get_site_settings():
|
||||
@classmethod
|
||||
def get_site_settings(cls):
|
||||
try:
|
||||
document = SiteSettingsDocument.objects.get(name="main")
|
||||
document = db.settings.get("main")
|
||||
except:
|
||||
webhooks = WebhooksDocument()
|
||||
document = SiteSettingsDocument(name="main", webhooks=webhooks)
|
||||
document.save()
|
||||
webhooks = Webhooks()
|
||||
default_entry = SiteSettings(name="main", webhooks=webhooks)
|
||||
document = db.settings.save_new(default_entry.dict(), webhooks.dict())
|
||||
|
||||
return SiteSettings._unpack_doc(document)
|
||||
return cls(**document)
|
||||
|
||||
def update(self):
|
||||
document = SiteSettingsDocument.objects.get(name="main")
|
||||
new_webhooks = WebhooksDocument(**self.webhooks.dict())
|
||||
|
||||
document.update(set__webhooks=new_webhooks)
|
||||
|
||||
document.save()
|
||||
db.settings.update("main", new_data=self.dict())
|
||||
|
||||
|
||||
class Colors(BaseModel):
|
||||
@@ -93,50 +77,67 @@ class SiteTheme(BaseModel):
|
||||
}
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def get_by_name(theme_name):
|
||||
document = SiteThemeDocument.objects.get(name=theme_name)
|
||||
return SiteTheme._unpack_doc(document)
|
||||
@classmethod
|
||||
def get_by_name(cls, theme_name):
|
||||
db_entry = db.themes.get(theme_name)
|
||||
name = db_entry.get("name")
|
||||
colors = Colors(**db_entry.get("colors"))
|
||||
|
||||
@staticmethod
|
||||
def _unpack_doc(document):
|
||||
document = json.loads(document.to_json())
|
||||
del document["_id"]
|
||||
theme_colors = SiteTheme(**document)
|
||||
return theme_colors
|
||||
return cls(name=name, colors=colors)
|
||||
|
||||
@staticmethod
|
||||
def get_all():
|
||||
all_themes = []
|
||||
for theme in SiteThemeDocument.objects():
|
||||
all_themes.append(SiteTheme._unpack_doc(theme))
|
||||
all_themes = db.themes.get_all()
|
||||
for index, theme in enumerate(all_themes):
|
||||
name = theme.get("name")
|
||||
colors = Colors(**theme.get("colors"))
|
||||
|
||||
all_themes[index] = SiteTheme(name=name, colors=colors)
|
||||
|
||||
return all_themes
|
||||
|
||||
def save_to_db(self):
|
||||
theme = self.dict()
|
||||
theme["colors"] = ThemeColorsDocument(**theme["colors"])
|
||||
|
||||
theme_document = SiteThemeDocument(**theme)
|
||||
|
||||
theme_document.save()
|
||||
db.themes.save_new(self.dict())
|
||||
|
||||
def update_document(self):
|
||||
theme = self.dict()
|
||||
theme["colors"] = ThemeColorsDocument(**theme["colors"])
|
||||
|
||||
theme_document = SiteThemeDocument.objects.get(name=self.name)
|
||||
|
||||
if theme_document:
|
||||
theme_document.update(set__colors=theme["colors"])
|
||||
|
||||
theme_document.save()
|
||||
db.themes.update(self.dict())
|
||||
|
||||
@staticmethod
|
||||
def delete_theme(theme_name: str) -> str:
|
||||
""" Removes the theme by name """
|
||||
document = SiteThemeDocument.objects.get(name=theme_name)
|
||||
db.themes.delete(theme_name)
|
||||
|
||||
if document:
|
||||
document.delete()
|
||||
return "Document Deleted"
|
||||
|
||||
def default_theme_init():
|
||||
default_colors = {
|
||||
"primary": "#E58325",
|
||||
"accent": "#00457A",
|
||||
"secondary": "#973542",
|
||||
"success": "#5AB1BB",
|
||||
"info": "#4990BA",
|
||||
"warning": "#FF4081",
|
||||
"error": "#EF5350",
|
||||
}
|
||||
|
||||
try:
|
||||
SiteTheme.get_by_name("default")
|
||||
logger.info("Default theme exists... skipping generation")
|
||||
except:
|
||||
logger.info("Generating Default Theme")
|
||||
colors = Colors(**default_colors)
|
||||
default_theme = SiteTheme(name="default", colors=colors)
|
||||
default_theme.save_to_db()
|
||||
|
||||
|
||||
def default_settings_init():
|
||||
try:
|
||||
document = db.settings.get("main")
|
||||
except:
|
||||
webhooks = Webhooks()
|
||||
default_entry = SiteSettings(name="main", webhooks=webhooks)
|
||||
document = db.settings.save_new(default_entry.dict(), webhooks.dict())
|
||||
|
||||
|
||||
if not sql_exists:
|
||||
default_settings_init()
|
||||
default_theme_init()
|
||||
|
||||
@@ -1,81 +0,0 @@
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
from services.settings_services import Colors, SiteTheme
|
||||
from utils.logger import logger
|
||||
|
||||
CWD = Path(__file__).parent
|
||||
DATA_DIR = CWD.joinpath("data")
|
||||
TEMP_DIR = CWD.joinpath("data", "temp")
|
||||
|
||||
|
||||
def ensure_dirs():
|
||||
DATA_DIR.mkdir(parents=True, exist_ok=True)
|
||||
DATA_DIR.joinpath("img").mkdir(parents=True, exist_ok=True)
|
||||
DATA_DIR.joinpath("backups").mkdir(parents=True, exist_ok=True)
|
||||
DATA_DIR.joinpath("templates").mkdir(parents=True, exist_ok=True)
|
||||
DATA_DIR.joinpath("debug").mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
def generate_default_theme():
|
||||
default_colors = {
|
||||
"primary": "#E58325",
|
||||
"accent": "#00457A",
|
||||
"secondary": "#973542",
|
||||
"success": "#5AB1BB",
|
||||
"info": "#4990BA",
|
||||
"warning": "#FF4081",
|
||||
"error": "#EF5350",
|
||||
}
|
||||
|
||||
try:
|
||||
SiteTheme.get_by_name("default")
|
||||
return "default theme exists"
|
||||
except:
|
||||
logger.info("Generating Default Theme")
|
||||
colors = Colors(**default_colors)
|
||||
default_theme = SiteTheme(name="default", colors=colors)
|
||||
default_theme.save_to_db()
|
||||
|
||||
|
||||
"""Script to export the ReDoc documentation page into a standalone HTML file."""
|
||||
|
||||
HTML_TEMPLATE = """<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta http-equiv="content-type" content="text/html; charset=UTF-8">
|
||||
<title>My Project - ReDoc</title>
|
||||
<meta charset="utf-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<link rel="shortcut icon" href="https://fastapi.tiangolo.com/img/favicon.png">
|
||||
<style>
|
||||
body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
</style>
|
||||
<style data-styled="" data-styled-version="4.4.1"></style>
|
||||
</head>
|
||||
<body>
|
||||
<div id="redoc-container"></div>
|
||||
<script src="https://cdn.jsdelivr.net/npm/redoc/bundles/redoc.standalone.js"> </script>
|
||||
<script>
|
||||
var spec = %s;
|
||||
Redoc.init(spec, {}, document.getElementById("redoc-container"));
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
CWD = Path(__file__).parent
|
||||
out_path = CWD.joinpath("temp", "index.html")
|
||||
|
||||
|
||||
def generate_api_docs(app):
|
||||
with open(out_path, "w") as fd:
|
||||
out_path.parent.mkdir(exist_ok=True)
|
||||
print(HTML_TEMPLATE % json.dumps(app.openapi()), file=fd)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
pass
|
||||
@@ -1,39 +0,0 @@
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
from services.scrape_services import normalize_data, normalize_instructions
|
||||
|
||||
CWD = Path(__file__).parent
|
||||
RAW_RECIPE_DIR = CWD.joinpath("data", "recipes-raw")
|
||||
|
||||
|
||||
@pytest.mark.parametrize("json_file,num_steps", [
|
||||
("best-homemade-salsa-recipe.json", 2),
|
||||
("blue-cheese-stuffed-turkey-meatballs-with-raspberry-balsamic-glaze-2.json", 3),
|
||||
("bon_appetit.json", 8),
|
||||
("chunky-apple-cake.json", 4),
|
||||
("dairy-free-impossible-pumpkin-pie.json", 7),
|
||||
("how-to-make-instant-pot-spaghetti.json", 8),
|
||||
("instant-pot-chicken-and-potatoes.json", 4),
|
||||
("instant-pot-kerala-vegetable-stew.json", 13),
|
||||
("jalapeno-popper-dip.json", 4),
|
||||
("microwave_sweet_potatoes_04783.json", 4),
|
||||
("moroccan-skirt-steak-with-roasted-pepper-couscous.json", 4),
|
||||
("Pizza-Knoblauch-Champignon-Paprika-vegan.html.json", 3),
|
||||
])
|
||||
def test_normalize_data(json_file, num_steps):
|
||||
recipe_data = normalize_data(json.load(open(RAW_RECIPE_DIR.joinpath(json_file))))
|
||||
assert len(recipe_data["recipeInstructions"]) == num_steps
|
||||
|
||||
|
||||
@pytest.mark.parametrize("instructions", [
|
||||
"A\n\nB\n\nC\n\n",
|
||||
"A\nB\nC\n",
|
||||
"A\r\n\r\nB\r\n\r\nC\r\n\r\n",
|
||||
"A\r\nB\r\nC\r\n",
|
||||
["A","B","C"],
|
||||
[{"@type": "HowToStep", "text": x} for x in ["A","B","C"]]
|
||||
])
|
||||
def test_normalize_instructions(instructions):
|
||||
assert normalize_instructions(instructions) == [{"text": "A"}, {"text": "B"}, {"text": "C"}]
|
||||
2
mealie/tests/conftest.py
Normal file
@@ -0,0 +1,2 @@
|
||||
import db.db_setup
|
||||
from pytest import fixture
|
||||
500
mealie/tests/data/html-raw/healthy_pasta_bake_60759.html
Normal file
|
Before Width: | Height: | Size: 5.7 KiB After Width: | Height: | Size: 5.7 KiB |
|
Before Width: | Height: | Size: 10 KiB After Width: | Height: | Size: 10 KiB |
4
mealie/tests/pytest.ini
Normal file
@@ -0,0 +1,4 @@
|
||||
[pytest]
|
||||
python_files = test_*
|
||||
python_classes = *Tests
|
||||
python_functions = test_*
|
||||
@@ -1,5 +1,5 @@
|
||||
from pathlib import Path
|
||||
|
||||
from app_config import TEMP_DIR
|
||||
import pytest
|
||||
from services.image_services import IMG_DIR
|
||||
from services.migrations.nextcloud import (
|
||||
@@ -11,8 +11,8 @@ from services.migrations.nextcloud import (
|
||||
from services.recipe_services import Recipe
|
||||
|
||||
CWD = Path(__file__).parent
|
||||
NEXTCLOUD_DIR = CWD.joinpath("data", "nextcloud_recipes")
|
||||
TEMP_NEXTCLOUD = CWD.parent.joinpath("data", "temp", "nextcloud")
|
||||
NEXTCLOUD_DIR = CWD.parent.joinpath("data", "nextcloud_recipes")
|
||||
TEMP_NEXTCLOUD = TEMP_DIR.joinpath("nextcloud")
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
@@ -39,5 +39,5 @@ def test_zip_extraction(file_name: str, final_path: Path):
|
||||
)
|
||||
def test_nextcloud_migration(recipe_dir: Path):
|
||||
recipe = import_recipes(recipe_dir)
|
||||
assert type(recipe) == Recipe
|
||||
assert isinstance(recipe, Recipe)
|
||||
IMG_DIR.joinpath(recipe.image).unlink(missing_ok=True)
|
||||
0
mealie/tests/test_recipes/__init__.py
Normal file
99
mealie/tests/test_recipes/test_scraper.py
Normal file
@@ -0,0 +1,99 @@
|
||||
import json
|
||||
import re
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
from services.scrape_services import (
|
||||
extract_recipe_from_html,
|
||||
normalize_data,
|
||||
normalize_instructions,
|
||||
)
|
||||
|
||||
CWD = Path(__file__).parent
|
||||
RAW_RECIPE_DIR = CWD.parent.joinpath("data", "recipes-raw")
|
||||
RAW_HTML_DIR = CWD.parent.joinpath("data", "html-raw")
|
||||
|
||||
# https://github.com/django/django/blob/stable/1.3.x/django/core/validators.py#L45
|
||||
url_validation_regex = re.compile(
|
||||
r"^(?:http|ftp)s?://" # http:// or https://
|
||||
r"(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|" # domain...
|
||||
r"localhost|" # localhost...
|
||||
r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})" # ...or ip
|
||||
r"(?::\d+)?" # optional port
|
||||
r"(?:/?|[/?]\S+)$",
|
||||
re.IGNORECASE,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"json_file,num_steps",
|
||||
[
|
||||
("best-homemade-salsa-recipe.json", 2),
|
||||
(
|
||||
"blue-cheese-stuffed-turkey-meatballs-with-raspberry-balsamic-glaze-2.json",
|
||||
3,
|
||||
),
|
||||
("bon_appetit.json", 8),
|
||||
("chunky-apple-cake.json", 4),
|
||||
("dairy-free-impossible-pumpkin-pie.json", 7),
|
||||
("how-to-make-instant-pot-spaghetti.json", 8),
|
||||
("instant-pot-chicken-and-potatoes.json", 4),
|
||||
("instant-pot-kerala-vegetable-stew.json", 13),
|
||||
("jalapeno-popper-dip.json", 4),
|
||||
("microwave_sweet_potatoes_04783.json", 4),
|
||||
("moroccan-skirt-steak-with-roasted-pepper-couscous.json", 4),
|
||||
("Pizza-Knoblauch-Champignon-Paprika-vegan.html.json", 3),
|
||||
],
|
||||
)
|
||||
def test_normalize_data(json_file, num_steps):
|
||||
recipe_data = normalize_data(json.load(open(RAW_RECIPE_DIR.joinpath(json_file))))
|
||||
assert len(recipe_data["recipeInstructions"]) == num_steps
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"instructions",
|
||||
[
|
||||
"A\n\nB\n\nC\n\n",
|
||||
"A\nB\nC\n",
|
||||
"A\r\n\r\nB\r\n\r\nC\r\n\r\n",
|
||||
"A\r\nB\r\nC\r\n",
|
||||
["A", "B", "C"],
|
||||
[{"@type": "HowToStep", "text": x} for x in ["A", "B", "C"]],
|
||||
],
|
||||
)
|
||||
def test_normalize_instructions(instructions):
|
||||
assert normalize_instructions(instructions) == [
|
||||
{"text": "A"},
|
||||
{"text": "B"},
|
||||
{"text": "C"},
|
||||
]
|
||||
|
||||
|
||||
def test_html_no_recipe_data():
|
||||
path = RAW_HTML_DIR.joinpath("carottes-rapps-with-rice-and-sunflower-seeds.html")
|
||||
url = "https://www.feedtheswimmers.com/blog/2019/6/5/carottes-rapps-with-rice-and-sunflower-seeds"
|
||||
recipe_data = extract_recipe_from_html(open(path).read(), url)
|
||||
|
||||
assert len(recipe_data["name"]) > 10
|
||||
assert len(recipe_data["slug"]) > 10
|
||||
assert recipe_data["orgURL"] == url
|
||||
assert len(recipe_data["description"]) > 100
|
||||
assert url_validation_regex.match(recipe_data["image"])
|
||||
assert recipe_data["recipeIngredient"] == ["Could not detect ingredients"]
|
||||
assert recipe_data["recipeInstructions"] == [
|
||||
{"text": "Could not detect instructions"}
|
||||
]
|
||||
|
||||
|
||||
def test_html_with_recipe_data():
|
||||
path = RAW_HTML_DIR.joinpath("healthy_pasta_bake_60759.html")
|
||||
url = "https://www.bbc.co.uk/food/recipes/healthy_pasta_bake_60759"
|
||||
recipe_data = extract_recipe_from_html(open(path).read(), url)
|
||||
|
||||
assert len(recipe_data["name"]) > 10
|
||||
assert len(recipe_data["slug"]) > 10
|
||||
assert recipe_data["orgURL"] == url
|
||||
assert len(recipe_data["description"]) > 100
|
||||
assert url_validation_regex.match(recipe_data["image"])
|
||||
assert len(recipe_data["recipeIngredient"]) == 13
|
||||
assert len(recipe_data["recipeInstructions"]) == 4
|
||||
41
mealie/utils/api_docs.py
Normal file
@@ -0,0 +1,41 @@
|
||||
import json
|
||||
|
||||
from app_config import BASE_DIR
|
||||
|
||||
"""Script to export the ReDoc documentation page into a standalone HTML file."""
|
||||
|
||||
HTML_TEMPLATE = """<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta http-equiv="content-type" content="text/html; charset=UTF-8">
|
||||
<title>My Project - ReDoc</title>
|
||||
<meta charset="utf-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<link rel="shortcut icon" href="https://fastapi.tiangolo.com/img/favicon.png">
|
||||
<style>
|
||||
body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
</style>
|
||||
<style data-styled="" data-styled-version="4.4.1"></style>
|
||||
</head>
|
||||
<body>
|
||||
<div id="redoc-container"></div>
|
||||
<script src="https://cdn.jsdelivr.net/npm/redoc/bundles/redoc.standalone.js"> </script>
|
||||
<script>
|
||||
var spec = %s;
|
||||
Redoc.init(spec, {}, document.getElementById("redoc-container"));
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
|
||||
def generate_api_docs(app):
|
||||
out_dir = BASE_DIR.joinpath(".temp")
|
||||
out_dir.mkdir(parents=True, exist_ok=True)
|
||||
out_path = out_dir.joinpath("index.html")
|
||||
with open(out_path, "w") as fd:
|
||||
out_path.parent.mkdir(exist_ok=True)
|
||||
print(HTML_TEMPLATE % json.dumps(app.openapi()), file=fd)
|
||||
11
mealie/utils/global_scheduler.py
Normal file
@@ -0,0 +1,11 @@
|
||||
from services.scheduler_services import Scheduler
|
||||
|
||||
|
||||
def start_scheduler():
|
||||
global scheduler
|
||||
scheduler = Scheduler()
|
||||
scheduler.startup_scheduler()
|
||||
return scheduler
|
||||
|
||||
|
||||
scheduler = start_scheduler()
|
||||
17
mealie/utils/startup.py
Normal file
@@ -0,0 +1,17 @@
|
||||
from pathlib import Path
|
||||
|
||||
from app_config import REQUIRED_DIRS
|
||||
from services.settings_services import default_theme_init
|
||||
|
||||
CWD = Path(__file__).parent
|
||||
|
||||
|
||||
def post_start():
|
||||
default_theme_init()
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
pass
|
||||