# 2024-06-03 12:47:31 -05:00  (extraction residue — kept as a comment)
|
|
|
import os
|
|
|
|
from datetime import datetime
|
|
|
|
from dotenv import load_dotenv
|
|
|
|
from AppriseClient import apprise_notify
|
|
|
|
from HealthchecksIO import healthcheck_ping
|
|
|
|
import requests as r
|
|
|
|
import subprocess
|
|
|
|
# Timestamp captured once at startup; tags the dump and zip filenames so
# every artifact of a single run shares the same suffix.
now = datetime.now()

# Silence urllib3's TLS warnings for the ping/notify HTTP calls below
# (the targets may use self-signed certificates).
r.packages.urllib3.disable_warnings()

# Load configuration from the .env file into the process environment.
load_dotenv()
|
|
|
|
|
|
|
|
def to_bool(value):
    """Convert *value* to a boolean.

    Accepts the common truthy/falsy spellings, case-insensitively and
    ignoring surrounding whitespace:

    Possible True values: 1, True, "1", "TRue", "yes", "y", "t"
    Possible False values: 0, False, "0", "faLse", "no", "n", "f"

    Raises:
        ValueError: if *value* matches neither set.  (ValueError is a
            subclass of Exception, so existing broad handlers still work.)
    """
    text = str(value).strip().lower()
    if text in ("yes", "y", "true", "t", "1"):
        return True
    if text in ("no", "n", "false", "f", "0"):
        return False
    raise ValueError(
        f"Invalid value for boolean conversion: {value!r}"
        '\nPossible True values: 1, True, "1", "TRue", "yes", "y", "t"'
        '\nPossible False values: 0, False, "0", "faLse", "no", "n", "f"'
    )
|
|
|
|
|
|
|
|
# switches — each env var toggles one stage of the backup pipeline below.
# NOTE(review): a missing switch reaches to_bool() as None and raises, so
# every switch must be present in the .env file.
docker_command = to_bool(os.getenv("docker_command"))
rclone_copy = to_bool(os.getenv("rclone_copy"))
rclone_config_create = to_bool(os.getenv("rclone_config_create"))
rclone_push = to_bool(os.getenv("rclone_push"))
restic_push = to_bool(os.getenv("restic_push"))
db_dump = to_bool(os.getenv("db_dump"))
zip_db_files = to_bool(os.getenv("zip_db_files"))
offload_db_files = to_bool(os.getenv("offload_db_files"))
cleanup = to_bool(os.getenv("cleanup"))
healthcheck = to_bool(os.getenv("healthcheck"))
notify = to_bool(os.getenv("notify"))

# NOTE(review): LOG_PATH is read but never used in this file — presumably
# consumed elsewhere or dead; confirm.
LOG_PATH = os.getenv("LOG_PATH")

# docker — name of the Seafile container for `docker exec` start/stop.
container_name = os.getenv("container_name")

# data folders — source of the live Seafile data and the local mirror.
seafile_data_local = os.getenv("seafile_data_local")
seafile_data_backup = os.getenv("seafile_data_backup")

# databases — comma-separated list of database names plus the connection
# details and scratch directory used by the mariadb-dump stage.
databases = os.getenv("databases")
db_dump_host = os.getenv("db_dump_host")
db_dump_user = os.getenv("db_dump_user")
db_dump_password = os.getenv("db_dump_password")
# NOTE(review): paths below are concatenated without adding separators, so
# db_dump_tmp_path is expected to end with "/" — confirm in .env.
db_dump_tmp_path = os.getenv("db_dump_tmp_path")

# Rclone remote — remote name/backend plus destination paths.
rclone_config_path = os.getenv("rclone_config_path")
rclone_remote = os.getenv("rclone_remote")
rclone_backend = os.getenv("rclone_backend")
rclone_provider = os.getenv("rclone_provider")
rclone_endpoint = os.getenv("rclone_endpoint")
rclone_remote_path = os.getenv("rclone_remote_path")
rclone_remote_db_path = os.getenv("rclone_remote_db_path")
# NOTE(review): rclone_environment_auth and rclone_config_path are loaded
# but never referenced later in this file; env_auth is hard-coded below.
rclone_environment_auth = os.getenv("rclone_environment_auth")
# Passed verbatim as rclone flags in the cleanup stage (e.g. "--min-age 30d").
rclone_db_retention = os.getenv("rclone_db_retention")

# Restic remote — standard restic/AWS variable names; only re-exported into
# this process's environment, presumably consumed by /opt/restic.sh.
RESTIC_REPOSITORY = os.getenv("RESTIC_REPOSITORY")
AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY")
RESTIC_PASSWORD = os.getenv("RESTIC_PASSWORD")

# healthchecks — ping URL for healthchecks.io start/success signals.
healthcheck_url = os.getenv("healthcheck_url")

# notify — Apprise endpoint and message content for the final notification.
apprise_apprise_url = os.getenv("apprise_apprise_url")
apprise_aurls = os.getenv("apprise_aurls")
apprise_title = os.getenv("apprise_title")
apprise_body = os.getenv("apprise_body")
|
|
|
|
|
|
|
|
# healthcheck - Tell healthchecks.io we are starting the backup
if healthcheck:
    # start=True presumably hits the /start endpoint so the dashboard can
    # time the whole run — confirm against HealthchecksIO.healthcheck_ping,
    # which is not visible in this file.
    healthcheck_ping(r, healthcheck_url, start=True)
|
|
|
|
|
|
|
|
# Stop seafile and seafile hub so the on-disk data is quiescent before the
# backup stages run. Seahub (web front end) goes down first, then the server.
if docker_command:
    # BUG FIX: path was "seafile-serverlatest" (missing hyphen), which does
    # not match the path used by every other docker command in this script.
    os.system(f'docker exec {container_name} /opt/seafile/seafile-server-latest/seahub.sh stop')
    os.system(f'docker exec {container_name} /opt/seafile/seafile-server-latest/seafile.sh stop')
|
|
|
|
|
|
|
|
# Dump each configured database to a timestamped .sql file in the scratch dir.
if db_dump:
    # os.makedirs replaces the old `os.system("mkdir -p ...")` shell-out and
    # makes the separate existence check unnecessary.
    os.makedirs(db_dump_tmp_path, exist_ok=True)
    for database in databases.split(','):
        # NOTE(review): the password appears on the command line (visible in
        # the process list) and values are interpolated unquoted into a shell
        # command — acceptable only because all inputs come from the
        # operator's own .env file.
        os.system(
            f'mariadb-dump -h {db_dump_host} -u {db_dump_user} '
            f'-p{db_dump_password} --skip-opt {database} > '
            f'{db_dump_tmp_path}{database}.{now.strftime("%m-%d-%Y_%H-%M-%S")}.sql'
        )
|
|
|
|
|
|
|
|
# Create the rclone remote on first run only: `rclone config file` reports
# "doesn't exist" when no config has been written yet.
if rclone_config_create:
    # text=True decodes the output to str; the original wrapped the bytes in
    # str(), matching the substring against the bytes *repr* (b'...'), which
    # happened to work but is fragile.
    rclone_config_check = subprocess.check_output(
        ["rclone", "config", "file"], text=True
    )
    if "doesn't exist" in rclone_config_check:
        # env_auth=true: rclone reads credentials from the environment.
        # NOTE(review): rclone_environment_auth is loaded from .env but never
        # used — presumably this hard-coded literal should come from it.
        os.system(
            f"rclone config create {rclone_remote} {rclone_backend} "
            f"provider={rclone_provider} endpoint={rclone_endpoint} env_auth=true"
        )
|
|
|
|
|
|
|
|
# Local rclone backup — mirror the live data dir into the local backup dir.
# `sync` makes the destination match the source (removes extraneous files);
# -P prints progress.
if rclone_copy:
    os.system(f'rclone sync -P {seafile_data_local} {seafile_data_backup}')

# Remote rclone backup — same mirror, but to the configured remote path.
if rclone_push:
    os.system(f'rclone sync -P {seafile_data_local} {rclone_remote}:{rclone_remote_path}')
|
|
|
|
|
|
|
|
# Remote restic backup — delegated entirely to an external script.
if restic_push:
    # NOTE(review): /opt/restic.sh presumably consumes the RESTIC_* / AWS_*
    # environment variables loaded above — confirm against that script.
    os.system("/opt/restic.sh")

# Start seafile and seafile hub — reverse of the shutdown order: the server
# comes up before the web front end.
if docker_command:
    os.system(f'docker exec {container_name} /opt/seafile/seafile-server-latest/seafile.sh start')
    os.system(f'docker exec {container_name} /opt/seafile/seafile-server-latest/seahub.sh start')
|
|
|
|
|
|
|
|
# compress db files — bundle the dumps into one timestamped zip, then drop
# the raw .sql files.
if zip_db_files:
    # NOTE(review): the zip is written inside the directory being zipped;
    # zip excludes its own output file, but confirm this layout is intended.
    os.system(f'zip -r {db_dump_tmp_path}/sfdb_{now.strftime("%m-%d-%Y_%H-%M-%S")} {db_dump_tmp_path}')
    os.system(f'rm {db_dump_tmp_path}*.sql')

# offload db file — copy the zip(s) to the remote database path.
if offload_db_files:
    os.system(f'rclone copy -P {db_dump_tmp_path} {rclone_remote}:{rclone_remote_db_path}')

# cleanup — remove local zips and prune old remote dumps.
if cleanup:
    os.system(f'rm {db_dump_tmp_path}*sfdb_*')
    # rclone_db_retention is interpolated verbatim as extra rclone flags
    # (presumably e.g. "--min-age 30d") — confirm its format in .env.
    os.system(f'rclone delete -P {rclone_db_retention} {rclone_remote}:{rclone_remote_db_path}')
|
|
|
|
|
|
|
|
# healthcheck — signal successful completion (plain ping, no start flag).
if healthcheck:
    healthcheck_ping(r, healthcheck_url)
|
|
|
|
|
|
|
|
# notification — send the final Apprise message for this run.
# FIX: removed a stray timestamp residue line that sat inside the `if`
# body and would have broken parsing.
if notify:
    apprise_notify(r, apprise_apprise_url, apprise_aurls, apprise_title, apprise_body)
|