mirror of https://github.com/mealie-recipes/mealie.git
synced 2026-02-07 08:23:12 -05:00
Feature/database backups (#1040)
* add annotations to docs
* alchemy data dumper
* initial tests
* sourcery refactor
* db backups/restore
* potential postgres fix
* potential postgres fix
* this is terrible
* potential pg fix
* cleanup
* remove unused import
* fix comparison
* generate frontend types
* update timestamp and add directory filter
* rewrite to new admin-api
* update backup routers
* add file_token response helper
* update imports
* remove test_backup
mealie/app.py
@@ -5,7 +5,7 @@ from fastapi.middleware.gzip import GZipMiddleware
 from mealie.core.config import get_app_settings
 from mealie.core.root_logger import get_logger
 from mealie.core.settings.static import APP_VERSION
-from mealie.routes import backup_routes, router, utility_routes
+from mealie.routes import router, utility_routes
 from mealie.routes.handlers import register_debug_handler
 from mealie.routes.media import media_router
 from mealie.services.scheduler import SchedulerRegistry, SchedulerService, tasks
@@ -69,7 +69,6 @@ def start_scheduler():
 def api_routers():
     app.include_router(router)
     app.include_router(media_router)
-    app.include_router(backup_routes.router)
     app.include_router(utility_routes.router)
@@ -24,7 +24,7 @@ class SQLiteProvider(AbstractDBProvider, BaseModel):

     @property
     def db_url(self) -> str:
-        return "sqlite:///" + str(self.db_path.absolute())
+        return f"sqlite:///{str(self.db_path.absolute())}"

     @property
     def db_url_public(self) -> str:
@@ -59,7 +59,5 @@ class PostgresProvider(AbstractDBProvider, BaseSettings):
 def db_provider_factory(provider_name: str, data_dir: Path, env_file: Path, env_encoding="utf-8") -> AbstractDBProvider:
     if provider_name == "postgres":
         return PostgresProvider(_env_file=env_file, _env_file_encoding=env_encoding)
-    elif provider_name == "sqlite":
-        return SQLiteProvider(data_dir=data_dir)
-    else:
-        return SQLiteProvider(data_dir=data_dir)
+    return SQLiteProvider(data_dir=data_dir)
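Worth noting: the factory treats SQLite as the catch-all, so any unrecognized provider name silently yields a SQLiteProvider. A minimal usage sketch — the paths here are illustrative, not Mealie's actual defaults:

from pathlib import Path

# any provider_name other than "postgres" falls through to SQLite
provider = db_provider_factory(
    provider_name="sqlite",
    data_dir=Path("/app/data"),
    env_file=Path("/app/.env"),
)
print(provider.db_url)  # e.g. sqlite:////app/data/mealie.db, per SQLiteProvider.db_path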
mealie/routes/admin/__init__.py
@@ -1,6 +1,14 @@
 from mealie.routes._base.routers import AdminAPIRouter

-from . import admin_about, admin_email, admin_log, admin_management_groups, admin_management_users, admin_server_tasks
+from . import (
+    admin_about,
+    admin_backups,
+    admin_email,
+    admin_log,
+    admin_management_groups,
+    admin_management_users,
+    admin_server_tasks,
+)

 router = AdminAPIRouter(prefix="/admin")
@@ -10,3 +18,4 @@ router.include_router(admin_management_users.router)
 router.include_router(admin_management_groups.router)
 router.include_router(admin_email.router, tags=["Admin: Email"])
 router.include_router(admin_server_tasks.router, tags=["Admin: Server Tasks"])
+router.include_router(admin_backups.router)
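The prefixes compose like ordinary FastAPI routers, so the new controller ends up under /admin/backups. A self-contained sketch with a stock APIRouter standing in for AdminAPIRouter (which additionally layers admin-only auth):

from fastapi import APIRouter, FastAPI

app = FastAPI()
admin = APIRouter(prefix="/admin")      # stand-in for AdminAPIRouter
backups = APIRouter(prefix="/backups")  # mirrors admin_backups.router


@backups.get("")
def get_all():
    return []


admin.include_router(backups)
app.include_router(admin)
# resulting route: GET /admin/backups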
mealie/routes/admin/admin_backups.py (new file, 95 lines)
@@ -0,0 +1,95 @@
import operator
import shutil
from pathlib import Path

from fastapi import APIRouter, File, HTTPException, UploadFile, status

from mealie.core.config import get_app_dirs
from mealie.core.security import create_file_token
from mealie.pkgs.stats.fs_stats import pretty_size
from mealie.routes._base import BaseAdminController, controller
from mealie.schema.admin.backup import AllBackups, BackupFile
from mealie.schema.response.responses import FileTokenResponse, SuccessResponse
from mealie.services.backups_v2.backup_v2 import BackupV2

router = APIRouter(prefix="/backups")


@controller(router)
class AdminBackupController(BaseAdminController):
    def _backup_path(self, name) -> Path:
        return get_app_dirs().BACKUP_DIR / name

    @router.get("", response_model=AllBackups)
    def get_all(self):
        app_dirs = get_app_dirs()
        imports = []
        for archive in app_dirs.BACKUP_DIR.glob("*.zip"):
            backup = BackupFile(
                name=archive.name, date=archive.stat().st_ctime, size=pretty_size(archive.stat().st_size)
            )
            imports.append(backup)

        templates = [template.name for template in app_dirs.TEMPLATE_DIR.glob("*.*")]
        imports.sort(key=operator.attrgetter("date"), reverse=True)

        return AllBackups(imports=imports, templates=templates)

    @router.post("", status_code=status.HTTP_201_CREATED, response_model=SuccessResponse)
    def create_one(self):
        backup = BackupV2()

        try:
            backup.backup()
        except Exception as e:
            raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR) from e

        return SuccessResponse.respond("Backup created successfully")

    @router.get("/{file_name}", response_model=FileTokenResponse)
    def get_one(self, file_name: str):
        """Returns a token to download a file"""
        file = self._backup_path(file_name)

        if not file.exists():
            raise HTTPException(status.HTTP_404_NOT_FOUND)

        return FileTokenResponse.respond(create_file_token(file))

    @router.delete("/{file_name}", status_code=status.HTTP_200_OK, response_model=SuccessResponse)
    def delete_one(self, file_name: str):
        file = self._backup_path(file_name)

        if not file.is_file():
            raise HTTPException(status.HTTP_400_BAD_REQUEST)

        try:
            file.unlink()
        except Exception as e:
            raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR) from e

        return SuccessResponse.respond(f"{file_name} has been deleted.")

    @router.post("/upload", response_model=SuccessResponse)
    def upload_one(self, archive: UploadFile = File(...)):
        """Upload a .zip File to later be imported into Mealie"""
        app_dirs = get_app_dirs()
        dest = app_dirs.BACKUP_DIR.joinpath(archive.filename)

        with dest.open("wb") as buffer:
            shutil.copyfileobj(archive.file, buffer)

        if not dest.is_file():
            raise HTTPException(status.HTTP_400_BAD_REQUEST)

        return SuccessResponse.respond(f"{archive.filename} has been uploaded")

    @router.post("/{file_name}/restore", response_model=SuccessResponse)
    def import_one(self, file_name: str):
        backup = BackupV2()

        file = self._backup_path(file_name)

        try:
            backup.restore(file)
        except Exception as e:
            raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR) from e

        return SuccessResponse.respond("Restore successful")
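Taken together, the controller gives a small backup lifecycle API. A hedged client-side sketch, assuming the admin router is mounted under /api and TOKEN holds a valid admin bearer token:

import requests

BASE = "http://localhost:9000/api/admin/backups"
HEADERS = {"Authorization": "Bearer TOKEN"}

# create a backup, then list the archives (sorted newest-first by get_all)
requests.post(BASE, headers=HEADERS).raise_for_status()
newest = requests.get(BASE, headers=HEADERS).json()["imports"][0]["name"]

# exchange the file name for a download token
token = requests.get(f"{BASE}/{newest}", headers=HEADERS).json()["fileToken"]

# restore the database and data directory from that archive
requests.post(f"{BASE}/{newest}/restore", headers=HEADERS).raise_for_status()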
mealie/routes/backup_routes.py (deleted, 111 lines)
@@ -1,111 +0,0 @@
import operator
import shutil

from fastapi import Depends, File, HTTPException, UploadFile, status
from sqlalchemy.orm.session import Session

from mealie.core.config import get_app_dirs
from mealie.core.dependencies import get_current_user
from mealie.core.root_logger import get_logger
from mealie.core.security import create_file_token
from mealie.db.db_setup import generate_session
from mealie.pkgs.stats.fs_stats import pretty_size
from mealie.routes._base.routers import AdminAPIRouter
from mealie.schema.admin import AllBackups, BackupFile, CreateBackup, ImportJob
from mealie.schema.user.user import PrivateUser
from mealie.services.backups import imports
from mealie.services.backups.exports import backup_all

router = AdminAPIRouter(prefix="/api/backups", tags=["Backups"])
logger = get_logger()
app_dirs = get_app_dirs()


@router.get("/available", response_model=AllBackups)
def available_imports():
    """Returns a list of avaiable .zip files for import into Mealie."""
    imports = []
    for archive in app_dirs.BACKUP_DIR.glob("*.zip"):
        backup = BackupFile(name=archive.name, date=archive.stat().st_ctime, size=pretty_size(archive.stat().st_size))
        imports.append(backup)

    templates = [template.name for template in app_dirs.TEMPLATE_DIR.glob("*.*")]
    imports.sort(key=operator.attrgetter("date"), reverse=True)

    return AllBackups(imports=imports, templates=templates)


@router.post("/export/database", status_code=status.HTTP_201_CREATED)
def export_database(data: CreateBackup, session: Session = Depends(generate_session)):
    """Generates a backup of the recipe database in json format."""
    try:
        export_path = backup_all(
            session=session,
            tag=data.tag,
            templates=data.templates,
            export_recipes=data.options.recipes,
            export_users=data.options.users,
            export_groups=data.options.groups,
            export_notifications=data.options.notifications,
        )

        return {"export_path": export_path}
    except Exception as e:
        logger.error(e)
        raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR)


@router.post("/upload", status_code=status.HTTP_200_OK)
def upload_backup_file(archive: UploadFile = File(...)):
    """Upload a .zip File to later be imported into Mealie"""
    dest = app_dirs.BACKUP_DIR.joinpath(archive.filename)

    with dest.open("wb") as buffer:
        shutil.copyfileobj(archive.file, buffer)

    if not dest.is_file:
        raise HTTPException(status.HTTP_400_BAD_REQUEST)


@router.get("/{file_name}/download")
async def download_backup_file(file_name: str):
    """Returns a token to download a file"""
    file = app_dirs.BACKUP_DIR.joinpath(file_name)

    return {"fileToken": create_file_token(file)}


@router.post("/{file_name}/import", status_code=status.HTTP_200_OK)
def import_database(
    import_data: ImportJob,
    session: Session = Depends(generate_session),
    user: PrivateUser = Depends(get_current_user),
):
    """Import a database backup file generated from Mealie."""

    return imports.import_database(
        user=user,
        session=session,
        archive=import_data.name,
        import_recipes=import_data.recipes,
        import_settings=import_data.settings,
        import_users=import_data.users,
        import_groups=import_data.groups,
        force_import=import_data.force,
        rebase=import_data.rebase,
    )


@router.delete("/{file_name}/delete", status_code=status.HTTP_200_OK)
def delete_backup(file_name: str):
    """Removes a database backup from the file system"""
    file_path = app_dirs.BACKUP_DIR.joinpath(file_name)

    if not file_path.is_file():
        raise HTTPException(status.HTTP_400_BAD_REQUEST)
    try:
        file_path.unlink()
    except Exception:
        raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR)

    return {"message": f"{file_name} has been deleted."}
mealie/schema/response/responses.py
@@ -1,5 +1,6 @@
 from typing import Optional

+from fastapi_camelcase import CamelModel
 from pydantic import BaseModel

@@ -28,3 +29,15 @@ class SuccessResponse(BaseModel):
         in the same call, for use while providing details to a HTTPException
         """
         return cls(message=message).dict()
+
+
+class FileTokenResponse(CamelModel):
+    file_token: str
+
+    @classmethod
+    def respond(cls, token: str) -> dict:
+        """
+        This method is a helper to create an object and convert it to a dictionary
+        in the same call, for use while providing details to a HTTPException
+        """
+        return cls(file_token=token).dict()
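Note that pydantic's .dict() keeps the snake_case field name; it is FastAPI's response_model serialization (by alias, via CamelModel) that produces the camelCase key on the wire. A quick sketch with an illustrative token value:

payload = FileTokenResponse.respond("abc123")
print(payload)  # {'file_token': 'abc123'} — sent over HTTP as {"fileToken": "abc123"}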
mealie/services/backups_v2/__init__.py (new file, 0 lines)
mealie/services/backups_v2/alchemy_exporter.py (new file, 138 lines)
@@ -0,0 +1,138 @@
import datetime
import json
from pathlib import Path

from fastapi.encoders import jsonable_encoder
from pydantic import BaseModel
from sqlalchemy import MetaData, create_engine
from sqlalchemy.engine import base
from sqlalchemy.orm import Session, sessionmaker

from mealie.services._base_service import BaseService


class AlchemyExporter(BaseService):
    connection_str: str
    engine: base.Engine
    meta: MetaData

    look_for_datetime = {"created_at", "update_at", "date_updated", "timestamp", "expires_at"}
    look_for_date = {"date_added", "date"}

    class DateTimeParser(BaseModel):
        date: datetime.date = None
        time: datetime.datetime = None

    def __init__(self, connection_str: str) -> None:
        super().__init__()

        self.connection_str = connection_str
        self.engine = create_engine(connection_str)
        self.meta = MetaData()
        self.session_maker = sessionmaker(bind=self.engine)

    @staticmethod
    def convert_to_datetime(data: dict) -> dict:
        """
        Walks the dictionary and converts all values that look like timestamps into datetime objects.
        Used in the context of reading a json file into a database via SQLAlchemy.
        """
        for key, value in data.items():
            if isinstance(value, dict):
                data[key] = AlchemyExporter.convert_to_datetime(value)
            elif isinstance(value, list):  # assume that this is a list of dictionaries
                data[key] = [AlchemyExporter.convert_to_datetime(item) for item in value]
            elif isinstance(value, str):
                if key in AlchemyExporter.look_for_datetime:
                    data[key] = AlchemyExporter.DateTimeParser(time=value).time
                if key in AlchemyExporter.look_for_date:
                    data[key] = AlchemyExporter.DateTimeParser(date=value).date

        return data

    @staticmethod
    def _compare_schemas(schema1: dict, schema2: dict) -> bool:
        try:
            # validate alembic version(s) are the same
            return schema1["alembic_version"] == schema2["alembic_version"]
        except KeyError:
            return False

    @staticmethod
    def validate_schemas(schema1: Path | dict, schema2: Path | dict) -> bool:
        """
        Validates that the schema of one database matches the schema of the other. In practice,
        this means validating that the alembic versions are the same.
        """

        def extract_json(file: Path) -> dict:
            with open(file) as f:
                return json.loads(f.read())

        if isinstance(schema1, Path):
            schema1 = extract_json(schema1)

        if isinstance(schema2, Path):
            schema2 = extract_json(schema2)

        return AlchemyExporter._compare_schemas(schema1, schema2)

    def dump_schema(self) -> dict:
        """
        Returns the schema of the SQLAlchemy database as a python dictionary. This dictionary is wrapped by
        jsonable_encoder to ensure that the object can be converted to a json string.
        """
        self.meta.reflect(bind=self.engine)

        all_tables = self.meta.tables.values()

        results = {
            **{table.name: [] for table in all_tables},
            "alembic_version": [dict(row) for row in self.engine.execute("SELECT * FROM alembic_version").fetchall()],
        }

        return jsonable_encoder(results)

    def dump(self) -> dict[str, list[dict]]:
        """
        Returns the entire SQLAlchemy database as a python dictionary. This dictionary is wrapped by
        jsonable_encoder to ensure that the object can be converted to a json string.
        """
        self.meta.reflect(bind=self.engine)  # http://docs.sqlalchemy.org/en/rel_0_9/core/reflection.html
        result = {
            table.name: [dict(row) for row in self.engine.execute(table.select())] for table in self.meta.sorted_tables
        }

        return jsonable_encoder(result)

    def restore(self, db_dump: dict) -> None:
        """Restores all data from the dictionary into the database"""
        data = AlchemyExporter.convert_to_datetime(db_dump)

        self.meta.reflect(bind=self.engine)
        for table_name, rows in data.items():
            if not rows:
                continue

            table = self.meta.tables[table_name]
            self.engine.execute(table.delete())
            self.engine.execute(table.insert(), rows)

    def drop_all(self) -> None:
        """Drops all data from the database"""
        self.meta.reflect(bind=self.engine)
        with self.session_maker() as session:
            session: Session

            is_postgres = self.settings.DB_ENGINE == "postgres"

            try:
                if is_postgres:
                    session.execute("SET session_replication_role = 'replica'")

                for table in self.meta.sorted_tables:
                    session.execute(f"DELETE FROM {table.name}")
            finally:
                if is_postgres:
                    session.execute("SET session_replication_role = 'origin'")
                session.commit()
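A hedged sketch of the dump/validate/restore cycle this class supports; the connection string and file path are illustrative, and the settings/logger plumbing AlchemyExporter inherits from BaseService is elided:

import json
from pathlib import Path

exporter = AlchemyExporter("sqlite:////app/data/mealie.db")

# dump every table to JSON (jsonable_encoder stringifies datetimes)
Path("database.json").write_text(json.dumps(exporter.dump()))

# confirm both sides are on the same alembic revision before restoring
if AlchemyExporter.validate_schemas(Path("database.json"), exporter.dump_schema()):
    exporter.drop_all()
    exporter.restore(json.loads(Path("database.json").read_text()))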
mealie/services/backups_v2/backup_file.py (new file, 45 lines)
@@ -0,0 +1,45 @@
import json
import shutil
import tempfile
from pathlib import Path


class BackupContents:
    def __init__(self, file: Path) -> None:
        self.base = file
        self.data_directory = self.base / "data"
        self.tables = self.base / "database.json"

    def validate(self) -> bool:
        if not self.base.is_dir():
            return False

        if not self.data_directory.is_dir():
            return False

        if not self.tables.is_file():
            return False

        return True

    def read_tables(self) -> dict:
        with open(self.tables) as f:
            return json.loads(f.read())


class BackupFile:
    temp_dir: Path | None

    def __init__(self, file: Path) -> None:
        self.zip = file
        self.temp_dir = None

    def __enter__(self) -> BackupContents:
        self.temp_dir = Path(tempfile.mkdtemp())
        shutil.unpack_archive(str(self.zip), str(self.temp_dir))
        return BackupContents(self.temp_dir)

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.temp_dir and self.temp_dir.is_dir():
            shutil.rmtree(self.temp_dir)

        self.temp_dir = None
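Usage is a plain context manager: the archive is unpacked to a temporary directory on entry and cleaned up on exit. A short sketch (the archive name is illustrative):

from pathlib import Path

with BackupFile(Path("/app/data/backups/mealie_2022.02.13.zip")) as contents:
    if contents.validate():
        tables = contents.read_tables()  # parsed database.json
        print(list(tables))              # table names in the dump
# the temporary extraction directory is removed on __exit__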
mealie/services/backups_v2/backup_v2.py (new file, 98 lines)
@@ -0,0 +1,98 @@
import datetime
import json
import shutil
from pathlib import Path
from zipfile import ZipFile

from mealie.services._base_service import BaseService
from mealie.services.backups_v2.alchemy_exporter import AlchemyExporter
from mealie.services.backups_v2.backup_file import BackupFile


class BackupV2(BaseService):
    def __init__(self, db_url: str = None) -> None:
        super().__init__()

        self.db_url = db_url or self.settings.DB_URL
        self.db_exporter = AlchemyExporter(self.db_url)

    def _sqlite(self) -> None:
        db_file = self.settings.DB_URL.removeprefix("sqlite:///")

        # Create a backup of the SQLite database
        timestamp = datetime.datetime.now().strftime("%Y.%m.%d")
        shutil.copy(db_file, f"mealie_{timestamp}.bak.db")

    def _postgres(self) -> None:
        pass

    def backup(self) -> Path:
        # sourcery skip: merge-nested-ifs, reintroduce-else, remove-redundant-continue
        exclude = {"mealie.db", "mealie.log", ".secret"}
        exclude_ext = {".zip"}
        exclude_dirs = {"backups"}

        timestamp = datetime.datetime.now().strftime("%Y.%m.%d.%H.%M.%S")

        backup_name = f"mealie_{timestamp}.zip"
        backup_file = self.directories.BACKUP_DIR / backup_name

        database_json = self.db_exporter.dump()

        with ZipFile(backup_file, "w") as zip_file:
            zip_file.writestr("database.json", json.dumps(database_json))

            for data_file in self.directories.DATA_DIR.glob("**/*"):
                if data_file.name in exclude:
                    continue

                if data_file.is_file() and data_file.suffix not in exclude_ext:
                    if data_file.parent.name in exclude_dirs:
                        continue

                    zip_file.write(data_file, f"data/{data_file.relative_to(self.directories.DATA_DIR)}")

        return backup_file

    def _copy_data(self, data_path: Path) -> None:
        for f in data_path.iterdir():
            if f.is_file():
                continue

            shutil.rmtree(self.directories.DATA_DIR / f.name)
            shutil.copytree(f, self.directories.DATA_DIR / f.name)

    def restore(self, backup_path: Path) -> None:
        self.logger.info("initiating backup restore")

        backup = BackupFile(backup_path)

        if self.settings.DB_ENGINE == "sqlite":
            self._sqlite()
        elif self.settings.DB_ENGINE == "postgres":
            self._postgres()

        with backup as contents:
            if not contents.validate():
                self.logger.error(
                    "Invalid backup file. File does not contain required elements (data directory and database.json)"
                )
                raise ValueError("Invalid backup file")

            # Purge the Database
            self.logger.info("dropping all database tables")
            self.db_exporter.drop_all()

            database_json = contents.read_tables()

            self.logger.info("importing database tables")
            self.db_exporter.restore(database_json)

            self.logger.info("database tables imported successfully")

            self.logger.info("restoring data directory")
            self._copy_data(contents.data_directory)
            self.logger.info("data directory restored successfully")

        self.logger.info("backup restore complete")
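End to end, the service reduces to two calls. A minimal sketch, assuming BackupV2 picks up DB_URL and the data directories from the application settings:

backup_service = BackupV2()

archive = backup_service.backup()  # writes mealie_<timestamp>.zip into BACKUP_DIR
backup_service.restore(archive)    # drops all tables, re-imports the dump, restores data/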