Bug/misc fixes (#618)

* Fixes #617

* set recipe settings default by env variables

* add variables to docker-compose

* update changelog

* bump dependencies

* add fallback name to scraper

Co-authored-by: hay-kot <hay-kot@pm.me>
This commit is contained in:
Hayden
2021-07-05 16:05:32 -08:00
committed by GitHub
parent 3e2c9f41cf
commit 9b5cf36981
16 changed files with 361 additions and 235 deletions

View File

@@ -139,14 +139,14 @@ class AppSettings(BaseSettings):
def public_db_url(cls, v: Optional[str], values: dict[str, Any]) -> str:
    """Return DB_URL with any postgres credentials masked for public display.

    Args:
        v: unused incoming value (pydantic validator signature).
        values: previously-validated settings fields; reads DB_URL,
            DB_ENGINE, POSTGRES_USER and POSTGRES_PASSWORD.

    Returns:
        The DB URL, with the username and password each replaced by
        "*****" when the engine is postgres.
    """
    url = values.get("DB_URL")
    engine = values.get("DB_ENGINE", "sqlite")
    if engine != "postgres":
        # sqlite URLs are file paths and carry no credentials to hide
        return url
    user = values.get("POSTGRES_USER")
    password = values.get("POSTGRES_PASSWORD")
    # Only the first occurrence is replaced so the rest of the URL
    # (e.g. a database name that happens to match) is left intact.
    return url.replace(user, "*****", 1).replace(password, "*****", 1)
DEFAULT_GROUP: str = "Home"
DEFAULT_EMAIL: str = "changeme@email.com"
DEFAULT_PASSWORD: str = "MyPassword"
@@ -157,6 +157,14 @@ class AppSettings(BaseSettings):
SFTP_USERNAME: Optional[str]
SFTP_PASSWORD: Optional[str]
# Recipe Default Settings
RECIPE_PUBLIC: bool = True
RECIPE_SHOW_NUTRITION: bool = True
RECIPE_SHOW_ASSETS: bool = True
RECIPE_LANDSCAPE_VIEW: bool = True
RECIPE_DISABLE_COMMENTS: bool = False
RECIPE_DISABLE_AMOUNT: bool = False
class Config:
env_file = BASE_DIR.joinpath(".env")
env_file_encoding = "utf-8"

View File

@@ -1,7 +1,8 @@
from fastapi import APIRouter
from . import events
from . import defaults, events
# Aggregate router for the /api/about endpoints; the sub-routers supply
# the actual routes (server events, recipe default settings).
about_router = APIRouter(prefix="/api/about")
about_router.include_router(events.router)
about_router.include_router(defaults.router)

View File

@@ -0,0 +1,11 @@
from fastapi import APIRouter
from mealie.schema.recipe import RecipeSettings
router = APIRouter(prefix="/recipes", tags=["About Recipes"])
@router.get("/defaults")
async def get_recipe_settings_defaults():
    """ Returns the Default Settings for Recipes as set by ENV variables """
    # RecipeSettings pulls its field defaults from the application settings
    # object, which is in turn seeded from RECIPE_* environment variables.
    return RecipeSettings()

View File

@@ -4,7 +4,7 @@ from pathlib import Path
from typing import Any, Optional
from fastapi_camelcase import CamelModel
from mealie.core.config import app_dirs
from mealie.core.config import app_dirs, settings
from mealie.db.models.recipe.recipe import RecipeModel
from mealie.schema.comments import CommentOut
from pydantic import BaseModel, Field, validator
@@ -19,12 +19,12 @@ class RecipeImageTypes(str, Enum):
class RecipeSettings(CamelModel):
    """Per-recipe feature flags.

    Field defaults are read from the application ``settings`` object, which
    is populated from RECIPE_* environment variables, so deployments can
    configure the defaults applied to newly created recipes.
    """

    # NOTE(review): the diff residue assigned each field twice (a hard-coded
    # literal immediately overridden by the settings-backed value); the dead
    # first assignments are removed here — runtime defaults are unchanged.
    public: bool = settings.RECIPE_PUBLIC
    show_nutrition: bool = settings.RECIPE_SHOW_NUTRITION
    show_assets: bool = settings.RECIPE_SHOW_ASSETS
    landscape_view: bool = settings.RECIPE_LANDSCAPE_VIEW
    disable_comments: bool = settings.RECIPE_DISABLE_COMMENTS
    disable_amount: bool = settings.RECIPE_DISABLE_AMOUNT

    class Config:
        # Allow construction directly from ORM objects (RecipeModel rows).
        orm_mode = True
@@ -130,7 +130,7 @@ class Recipe(RecipeSummary):
perform_time: Optional[str] = None
# Mealie Specific
settings: Optional[RecipeSettings]
settings: Optional[RecipeSettings] = RecipeSettings()
assets: Optional[list[RecipeAsset]] = []
notes: Optional[list[RecipeNote]] = []
org_url: Optional[str] = Field(None, alias="orgURL")

View File

@@ -1,6 +1,7 @@
import json
from enum import Enum
from typing import Any, Callable
from uuid import uuid4
import requests
from fastapi import HTTPException, status
@@ -10,6 +11,7 @@ from mealie.schema.recipe import Recipe, RecipeStep
from mealie.services.image.image import scrape_image
from mealie.services.scraper import cleaner, open_graph
from recipe_scrapers import NoSchemaFoundInWildMode, SchemaScraperFactory, WebsiteNotImplementedError, scrape_me
from slugify import slugify
LAST_JSON = app_dirs.DEBUG_DIR.joinpath("last_recipe.json")
@@ -31,13 +33,17 @@ def create_from_url(url: str) -> Recipe:
logger.info(f"Image {new_recipe.image}")
new_recipe.image = download_image_for_recipe(new_recipe.slug, new_recipe.image)
if new_recipe.name is None or new_recipe.name == "":
new_recipe.name = "No Recipe Found" + uuid4().hex
new_recipe.slug = slugify(new_recipe.name)
return new_recipe
class ParserErrors(str, Enum):
    """Machine-readable error codes returned in scraper HTTP 400 responses."""

    # NOTE(review): each lowercase member is paired with an UPPER_CASE name
    # bound to the same string value. By Enum semantics a later duplicate
    # value becomes an alias of the earlier member, so both spellings
    # resolve to the same member and existing callers keep working.
    bad_recipe = "BAD_RECIPE_DATA"
    no_recipe_data = "NO_RECIPE_DATA"
    connection_error = "CONNECTION_ERROR"
    BAD_RECIPE_DATA = "BAD_RECIPE_DATA"
    NO_RECIPE_DATA = "NO_RECIPE_DATA"
    CONNECTION_ERROR = "CONNECTION_ERROR"
def extract_open_graph_values(url) -> Recipe:
@@ -70,10 +76,10 @@ def scrape_from_url(url: str) -> Recipe:
recipe = extract_open_graph_values(url)
if recipe.name != "":
return recipe
raise HTTPException(status.HTTP_400_BAD_REQUEST, {"details": ParserErrors.bad_recipe.value})
raise HTTPException(status.HTTP_400_BAD_REQUEST, {"details": ParserErrors.BAD_RECIPE_DATA.value})
except ConnectionError:
raise HTTPException(status.HTTP_400_BAD_REQUEST, {"details": ParserErrors.connection_error.value})
raise HTTPException(status.HTTP_400_BAD_REQUEST, {"details": ParserErrors.CONNECTION_ERROR.value})
try:
instruct = scraped_schema.instructions()
@@ -86,7 +92,7 @@ def scrape_from_url(url: str) -> Recipe:
ing = []
if not instruct and not ing:
raise HTTPException(status.HTTP_400_BAD_REQUEST, {"details": ParserErrors.no_recipe_data.value})
raise HTTPException(status.HTTP_400_BAD_REQUEST, {"details": ParserErrors.NO_RECIPE_DATA.value})
else:
return clean_scraper(scraped_schema, url)