prs-fleshgolem-2070: feat: sqlalchemy 2.0 (#2096)

* upgrade sqlalchemy to 2.0

* rewrite all db models to sqla 2.0 mapping api

* fix some importing and typing weirdness

* fix types of a lot of nullable columns

* remove get_ref methods

* fix issues found by tests

* rewrite all queries in repository_recipe to 2.0 style

* rewrite all repository queries to 2.0 api

* rewrite all remaining queries to 2.0 api

* remove now-unneeded __allow_unmapped__ flag

* remove and fix some unneeded cases of "# type: ignore"

* fix formatting

* bump black version

* run black

* can this please be the last one. okay. just. okay.

* fix repository errors

* remove return

* drop open API validator

---------

Co-authored-by: Sören Busch <fleshgolem@gmx.net>
Author: Hayden
Date: 2023-02-06 18:43:12 -09:00
Committed by: GitHub
Parent: 91cd00976a
Commit: 9e77a9f367
86 changed files with 1776 additions and 1572 deletions
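
Since most of this diff is the mechanical fallout of the "rewrite all db models to sqla 2.0 mapping api" and "fix types of a lot of nullable columns" items, here is a minimal before/after sketch of that mapping change. The Recipe model below is hypothetical and purely illustrative; Mealie's real models live under mealie/db/models.

# SQLAlchemy 2.0 declarative mapping, minimal sketch (hypothetical model).
from sqlalchemy import String
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class Recipe(Base):
    __tablename__ = "recipes"

    # 1.x style was: id = Column(Integer, primary_key=True)
    id: Mapped[int] = mapped_column(primary_key=True)
    name: Mapped[str] = mapped_column(String(255))
    # Nullability now lives in the type annotation, which is why so many
    # nullable columns needed their declared types fixed in this PR.
    description: Mapped[str | None] = mapped_column(String(255))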


@@ -4,7 +4,7 @@ from pathlib import Path
 from fastapi.encoders import jsonable_encoder
 from pydantic import BaseModel
-from sqlalchemy import MetaData, create_engine
+from sqlalchemy import MetaData, create_engine, insert, text
 from sqlalchemy.engine import base
 from sqlalchemy.orm import sessionmaker
@@ -85,41 +85,48 @@ class AlchemyExporter(BaseService):
         Returns the schema of the SQLAlchemy database as a python dictionary. This dictionary is wrapped by
         jsonable_encoder to ensure that the object can be converted to a json string.
         """
-        self.meta.reflect(bind=self.engine)
-        all_tables = self.meta.tables.values()
-
-        results = {
-            **{table.name: [] for table in all_tables},
-            "alembic_version": [dict(row) for row in self.engine.execute("SELECT * FROM alembic_version").fetchall()],
-        }
-
-        return jsonable_encoder(results)
+        with self.engine.connect() as connection:
+            self.meta.reflect(bind=self.engine)
+            all_tables = self.meta.tables.values()
+
+            results = {
+                **{table.name: [] for table in all_tables},
+                "alembic_version": [
+                    dict(row) for row in connection.execute(text("SELECT * FROM alembic_version")).mappings()
+                ],
+            }
+
+            return jsonable_encoder(results)
 
     def dump(self) -> dict[str, list[dict]]:
         """
         Returns the entire SQLAlchemy database as a python dictionary. This dictionary is wrapped by
         jsonable_encoder to ensure that the object can be converted to a json string.
         """
-        self.meta.reflect(bind=self.engine)  # http://docs.sqlalchemy.org/en/rel_0_9/core/reflection.html
-        result = {
-            table.name: [dict(row) for row in self.engine.execute(table.select())] for table in self.meta.sorted_tables
-        }
+        with self.engine.connect() as connection:
+            self.meta.reflect(bind=self.engine)  # http://docs.sqlalchemy.org/en/rel_0_9/core/reflection.html
+            result = {
+                table.name: [dict(row) for row in connection.execute(table.select()).mappings()]
+                for table in self.meta.sorted_tables
+            }
         return jsonable_encoder(result)
 
     def restore(self, db_dump: dict) -> None:
         """Restores all data from dictionary into the database"""
-        data = AlchemyExporter.convert_to_datetime(db_dump)
-
-        self.meta.reflect(bind=self.engine)
-        for table_name, rows in data.items():
-            if not rows:
-                continue
-
-            table = self.meta.tables[table_name]
-            self.engine.execute(table.delete())
-            self.engine.execute(table.insert(), rows)
+        with self.engine.begin() as connection:
+            data = AlchemyExporter.convert_to_datetime(db_dump)
+
+            self.meta.reflect(bind=self.engine)
+            for table_name, rows in data.items():
+                if not rows:
+                    continue
+
+                table = self.meta.tables[table_name]
+                connection.execute(table.delete())
+                connection.execute(insert(table), rows)
 
     def drop_all(self) -> None:
         """Drops all data from the database"""
@@ -129,11 +136,11 @@ class AlchemyExporter(BaseService):
         try:
             if is_postgres:
-                session.execute("SET session_replication_role = 'replica'")
+                session.execute(text("SET session_replication_role = 'replica'"))
 
             for table in self.meta.sorted_tables:
-                session.execute(f"DELETE FROM {table.name}")
+                session.execute(text(f"DELETE FROM {table.name}"))
         finally:
             if is_postgres:
-                session.execute("SET session_replication_role = 'origin'")
+                session.execute(text("SET session_replication_role = 'origin'"))
 
             session.commit()
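
The pattern in the exporter hunks above: SQLAlchemy 2.0 removed the implicit Engine.execute(), so raw SQL is wrapped in text() and run on an explicit Connection, with .mappings() standing in for the old dict(row) over plain rows. A self-contained sketch of just that pattern against a throwaway in-memory SQLite database (the table name is borrowed from the diff, everything else is illustrative):

from sqlalchemy import create_engine, text

engine = create_engine("sqlite://")  # stand-in for Mealie's configured engine

with engine.connect() as connection:
    connection.execute(text("CREATE TABLE alembic_version (version_num TEXT)"))
    connection.execute(text("INSERT INTO alembic_version VALUES ('abc123')"))

    # 1.x allowed: engine.execute("SELECT * FROM alembic_version").fetchall()
    rows = connection.execute(text("SELECT * FROM alembic_version")).mappings()
    print([dict(row) for row in rows])  # [{'version_num': 'abc123'}]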


@@ -69,7 +69,6 @@ class DefaultEmailSender(ABCEmailSender, BaseService):
     """
 
     def send(self, email_to: str, subject: str, html: str) -> bool:
-
         if self.settings.SMTP_FROM_EMAIL is None or self.settings.SMTP_FROM_NAME is None:
             raise ValueError("SMTP_FROM_EMAIL and SMTP_FROM_NAME must be set in the config file.")


@@ -8,6 +8,7 @@ from urllib.parse import parse_qs, urlencode, urlsplit, urlunsplit
 from fastapi.encoders import jsonable_encoder
 from pydantic import UUID4
+from sqlalchemy import select
 from sqlalchemy.orm.session import Session
 
 from mealie.db.db_setup import session_context
@@ -143,12 +144,9 @@ class WebhookEventListener(EventListenerBase):
     def get_scheduled_webhooks(self, start_dt: datetime, end_dt: datetime) -> list[ReadWebhook]:
         """Fetches all scheduled webhooks from the database"""
 
         with self.ensure_session() as session:
-            return (
-                session.query(GroupWebhooksModel)
-                .where(
-                    GroupWebhooksModel.enabled == True,  # noqa: E712 - required for SQLAlchemy comparison
-                    GroupWebhooksModel.scheduled_time > start_dt.astimezone(timezone.utc).time(),
-                    GroupWebhooksModel.scheduled_time <= end_dt.astimezone(timezone.utc).time(),
-                )
-                .all()
-            )
+            stmt = select(GroupWebhooksModel).where(
+                GroupWebhooksModel.enabled == True,  # noqa: E712 - required for SQLAlchemy comparison
+                GroupWebhooksModel.scheduled_time > start_dt.astimezone(timezone.utc).time(),
+                GroupWebhooksModel.scheduled_time <= end_dt.astimezone(timezone.utc).time(),
+            )
+            return session.execute(stmt).scalars().all()
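
The same query rewrite repeats across the repository layer: build a select() statement, then run session.execute(stmt).scalars().all() instead of session.query(...).all(). A runnable sketch with a hypothetical Webhook model (names illustrative, not Mealie's actual schema):

from sqlalchemy import create_engine, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class Webhook(Base):
    __tablename__ = "webhooks"
    id: Mapped[int] = mapped_column(primary_key=True)
    enabled: Mapped[bool] = mapped_column(default=True)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Webhook())
    session.commit()

    # 1.x: session.query(Webhook).where(Webhook.enabled == True).all()
    stmt = select(Webhook).where(Webhook.enabled == True)  # noqa: E712
    enabled_hooks = session.execute(stmt).scalars().all()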


@@ -67,7 +67,6 @@ class BaseMigrator(BaseService):
         self.report_id = self.report.id
 
     def _save_all_entries(self) -> None:
-
         is_success = True
         is_failure = True


@@ -17,7 +17,6 @@ replace_abbreviations = {
 
 def replace_common_abbreviations(string: str) -> str:
-
     for k, v in replace_abbreviations.items():
         regex = rf"(?<=\d)\s?({k}\bs?)"
         string = re.sub(regex, v, string)


@@ -180,7 +180,6 @@ def import_data(lines):
         # otherwise it's a token
         # e.g.: potato \t I2 \t L5 \t NoCAP \t B-NAME/0.978253
         else:
-
             columns = re.split("\t", line.strip())
             token = columns[0].strip()


@@ -33,7 +33,6 @@ async def largest_content_len(urls: list[str]) -> tuple[str, int]:
         tasks = [do(client, url) for url in urls]
         responses: list[Response] = await gather_with_concurrency(10, *tasks)
         for response in responses:
-
             len_int = int(response.headers.get("Content-Length", 0))
             if len_int > largest_len:
                 largest_url = str(response.url)


@@ -108,7 +108,6 @@ class RecipeService(BaseService):
         return Recipe(**additional_attrs)
 
     def create_one(self, create_data: Recipe | CreateRecipe) -> Recipe:
-
         if create_data.name is None:
             create_data.name = "New Recipe"


@@ -1,6 +1,8 @@
 import datetime
 from pathlib import Path
 
+from sqlalchemy import select
+
 from mealie.core import root_logger
 from mealie.core.config import get_app_dirs
 from mealie.db.db_setup import session_context
@@ -17,7 +19,8 @@ def purge_group_data_exports(max_minutes_old=ONE_DAY_AS_MINUTES):
     limit = datetime.datetime.now() - datetime.timedelta(minutes=max_minutes_old)
 
     with session_context() as session:
-        results = session.query(GroupDataExportsModel).filter(GroupDataExportsModel.expires <= limit)
+        stmt = select(GroupDataExportsModel).filter(GroupDataExportsModel.expires <= limit)
+        results = session.execute(stmt).scalars().all()
 
         total_removed = 0
         for result in results:

@@ -1,5 +1,7 @@
 import datetime
 
+from sqlalchemy import delete
+
 from mealie.core import root_logger
 from mealie.db.db_setup import session_context
 from mealie.db.models.users.password_reset import PasswordResetModel
@@ -15,7 +17,8 @@ def purge_password_reset_tokens():
     limit = datetime.datetime.now() - datetime.timedelta(days=MAX_DAYS_OLD)
 
     with session_context() as session:
-        session.query(PasswordResetModel).filter(PasswordResetModel.created_at <= limit).delete()
+        stmt = delete(PasswordResetModel).filter(PasswordResetModel.created_at <= limit)
+        session.execute(stmt)
         session.commit()
         session.close()
 
     logger.info("password reset tokens purged")


@@ -1,5 +1,7 @@
 import datetime
 
+from sqlalchemy import delete
+
 from mealie.core import root_logger
 from mealie.db.db_setup import session_context
 from mealie.db.models.group import GroupInviteToken
@@ -15,7 +17,8 @@ def purge_group_registration():
     limit = datetime.datetime.now() - datetime.timedelta(days=MAX_DAYS_OLD)
 
     with session_context() as session:
-        session.query(GroupInviteToken).filter(GroupInviteToken.created_at <= limit).delete()
+        stmt = delete(GroupInviteToken).filter(GroupInviteToken.created_at <= limit)
+        session.execute(stmt)
         session.commit()
         session.close()
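
Both purge jobs above make the same change: Query.delete() becomes an explicit delete() statement executed on the session. A compact, self-contained sketch of that 2.0 bulk-delete pattern (hypothetical Token model, illustrative only):

import datetime

from sqlalchemy import create_engine, delete
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class Token(Base):
    __tablename__ = "tokens"
    id: Mapped[int] = mapped_column(primary_key=True)
    created_at: Mapped[datetime.datetime] = mapped_column(default=datetime.datetime.now)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

limit = datetime.datetime.now() - datetime.timedelta(days=2)

with Session(engine) as session:
    # 1.x: session.query(Token).filter(Token.created_at <= limit).delete()
    session.execute(delete(Token).where(Token.created_at <= limit))
    session.commit()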


@@ -83,7 +83,6 @@ class RecipeBulkScraperService(BaseService):
         tasks = [_do(b.url) for b in urls.imports]
         results = await gather(*tasks)
 
         for b, recipe in zip(urls.imports, results, strict=True):
-
             if not recipe:
                 continue