changed the healthcheck
All checks were successful
continuous-integration/drone/push Build is passing

This commit is contained in:
jblu 2024-06-03 12:47:31 -05:00
parent c79625a64a
commit f17988bfa9
7 changed files with 269 additions and 273 deletions

View File

@ -1,37 +1,35 @@
kind: pipeline
name: default
steps:
- name: docker
image: plugins/docker
settings:
registry: git.jonb.io
dry_run: false
username: jblu
password:
from_secret: gittea_drone
repo: git.jonb.io/jblu/seafile-backup
tags:
- latest
when:
branch:
- main
event:
- push
- pull_request
- name: docker-test
image: plugins/docker
settings:
registry: git.jonb.io
dry_run: false
username: jblu
password:
from_secret: gittea_drone
repo: git.jonb.io/jblu/seafile-backup
tags:
- dev
when:
branch:
- dev*
event:
- push
- pull_request
# Drone CI pipeline: builds the Docker image and pushes it to the private
# Gitea registry.  Two near-identical steps: "docker" tags :latest from the
# main branch, "docker-test" tags :dev from dev* branches.  Both steps now
# trigger on push only (pull_request was removed in this revision).
kind: pipeline
name: default
steps:
- name: docker
  image: plugins/docker
  settings:
    registry: git.jonb.io
    dry_run: false
    username: jblu
    # Registry password is injected from the Drone secret store.
    password:
      from_secret: gittea_drone
    repo: git.jonb.io/jblu/seafile-backup
    tags:
    - latest
  when:
    branch:
    - main
    event:
    - push
- name: docker-test
  image: plugins/docker
  settings:
    registry: git.jonb.io
    dry_run: false
    username: jblu
    password:
      from_secret: gittea_drone
    repo: git.jonb.io/jblu/seafile-backup
    tags:
    - dev
  when:
    branch:
    # Glob: any branch whose name starts with "dev".
    - dev*
    event:
    - push

16
.gitignore vendored
View File

@ -1,9 +1,9 @@
.env
seafile-backup.sh
*.log
*.conf
*tests*
*tests*
*dest*
*db*
.env
seafile-backup.sh
*.log
*.conf
*tests*
*dest*
*db*
*pycache*

View File

@ -1,39 +1,39 @@
import requests as r
from tomllib import load
import os
def apprise_notify(req_obj, apprise_url, aurls, title, body):
payload = {'urls': aurls,'title': title,'body': body,}
apprise_response = req_obj.post(apprise_url, json = payload ,verify=False)
return apprise_response
class AppriseClient:
def __init__(self):
self.config = ''
try:
if os.environ["DOCKER"]:
self.host = os.environ["host"]
self.port = os.environ["port"]
self.aurls = os.environ["aurls"]
self.title = os.environ["title"]
self.body = os.environ["body"]
if os.environ["toml_path"]:
config_file_path=os.environ["toml_path"]
with open(config_file_path, 'rb') as c:
self.config = load(c)
except:
KeyError
if os.path.exists('./config.toml'):
config_file_path = './config.toml'
with open(config_file_path, 'rb') as c:
self.config = load(c)
if self.config:
self.host = self.config["apprise"]["host"]
self.port = self.config["apprise"]["port"]
self.aurls = self.config["apprise"]["aurls"]
self.title = self.config["apprise"]["title"]
self.body = self.config["apprise"]["body"]
self.apprise_response = apprise_notify(r,self.host,self.port,self.aurls,self.title,self.body)
if __name__ == "__main__":
import requests as r
from tomllib import load
import os
def apprise_notify(req_obj, apprise_url, aurls, title, body):
    """Post one notification to an Apprise API endpoint.

    req_obj is a requests-compatible module or session; aurls is the Apprise
    URL list the server fans the message out to.  Returns the HTTP response
    object.  NOTE(review): verify=False disables TLS certificate checking --
    presumably for a self-signed internal endpoint; confirm.
    """
    message = {'urls': aurls, 'title': title, 'body': body}
    response = req_obj.post(apprise_url, json=message, verify=False)
    return response
class AppriseClient:
def __init__(self):
self.config = ''
try:
if os.environ["DOCKER"]:
self.host = os.environ["host"]
self.port = os.environ["port"]
self.aurls = os.environ["aurls"]
self.title = os.environ["title"]
self.body = os.environ["body"]
if os.environ["toml_path"]:
config_file_path=os.environ["toml_path"]
with open(config_file_path, 'rb') as c:
self.config = load(c)
except:
KeyError
if os.path.exists('./config.toml'):
config_file_path = './config.toml'
with open(config_file_path, 'rb') as c:
self.config = load(c)
if self.config:
self.host = self.config["apprise"]["host"]
self.port = self.config["apprise"]["port"]
self.aurls = self.config["apprise"]["aurls"]
self.title = self.config["apprise"]["title"]
self.body = self.config["apprise"]["body"]
self.apprise_response = apprise_notify(r,self.host,self.port,self.aurls,self.title,self.body)
# Manual smoke test: constructing the client reads env/config settings and
# sends a notification as a side effect of __init__.
if __name__ == "__main__":
    AppriseClient()

View File

@ -1,8 +1,6 @@
def healthcheck_ping(req_obj, url, start=False):
if start == True:
url + '/start'
try:
req_obj.get(url, timeout=10)
except req_obj.RequestException as e:
# Log ping failure here...
def healthcheck_ping(req_obj, url, start=False):
    """Ping a healthchecks.io-style check URL.

    req_obj: the requests module (or a compatible object exposing .get and a
    RequestException attribute).  When start is True, "/start" is appended to
    the URL so the service records the beginning of a job rather than a
    success.  Failures are printed and swallowed: a monitoring hiccup must
    not abort the backup run.
    """
    # Backward-compatible restore of the start parameter: seafile-backup.py
    # still calls healthcheck_ping(r, url, start=True); without it that call
    # raises TypeError.  The pre-change code also computed `url + '/start'`
    # without assigning the result -- fixed here.
    if start:
        url = url + '/start'
    try:
        req_obj.get(url, timeout=5)
    except req_obj.RequestException as e:
        # Log ping failure here...
        print("Ping failed: %s" % e)

View File

@ -1,7 +1,7 @@
#!/bin/sh
CRON_CONFIG_FILE="/opt/crontab"
echo "${CRON} python /opt/seafile-backup.py" > $CRON_CONFIG_FILE
#!/bin/sh
# Container entrypoint: render a one-line crontab from the CRON env var and
# hand control to supercronic.
CRON_CONFIG_FILE="/opt/crontab"
# ${CRON} holds the schedule expression; the rendered line runs the backup.
echo "${CRON} python /opt/seafile-backup.py" > $CRON_CONFIG_FILE
# exec replaces the shell so supercronic becomes PID 1 and receives signals.
exec supercronic -passthrough-logs -quiet $CRON_CONFIG_FILE

View File

@ -1,38 +1,38 @@
#!/bin/ash
: "${RESTIC_REPOSITORY:?Need the restic repository}"
: "${AWS_ACCESS_KEY_ID:?Need the access key id}"
: "${AWS_SECRET_ACCESS_KEY:?Need the secret access key}"
: "${RESTIC_PASSWORD:?Need the restic password}"
: "${LOG_PATH:-./restic-backup.log}"
: "${seafile_data_local:-/seafile}"
# need to securely provide password: https://restic.readthedocs.io/en/latest/faq.html#how-can-i-specify-encryption-passwords-automatically
restic snapshots > /dev/null || restic init
#Define a timestamp function
timestamp() {
date "+%b %d %Y %T %Z"
}
# insert timestamp into log
printf "\n\n"
echo "-------------------------------------------------------------------------------" | tee -a $LOG_PATH
echo "$(timestamp): restic-backup.sh started" | tee -a $LOG_PATH
# Run Backups
restic backup $seafile_data_local | tee -a $LOG_PATH
# Remove snapshots according to policy
# If run cron more frequently, might add --keep-hourly 24
restic forget --keep-daily 7 --keep-weekly 4 --keep-monthly 12 --keep-yearly 7 | tee -a $LOG_PATH
# Remove unneeded data from the repository
restic prune | tee -a $LOG_PATH
# Check the repository for errors
restic check | tee -a $LOG_PATH
# insert timestamp into log
printf "\n\n"
echo "-------------------------------------------------------------------------------" | tee -a $LOG_PATH
#!/bin/ash
# restic backup wrapper: validate the environment, ensure the repository
# exists, back up the seafile data directory, then apply the retention policy
# and integrity checks.  Output is appended to $LOG_PATH.
: "${RESTIC_REPOSITORY:?Need the restic repository}"
: "${AWS_ACCESS_KEY_ID:?Need the access key id}"
: "${AWS_SECRET_ACCESS_KEY:?Need the secret access key}"
: "${RESTIC_PASSWORD:?Need the restic password}"
# Fixed: use := (assign default) instead of :- which expands the default but
# never assigns it, leaving these variables empty in every command below.
: "${LOG_PATH:=./restic-backup.log}"
: "${seafile_data_local:=/seafile}"
# need to securely provide password: https://restic.readthedocs.io/en/latest/faq.html#how-can-i-specify-encryption-passwords-automatically
# If `restic snapshots` fails, the repo presumably does not exist yet: create it.
restic snapshots > /dev/null || restic init
#Define a timestamp function
timestamp() {
  date "+%b %d %Y %T %Z"
}
# insert timestamp into log
printf "\n\n"
echo "-------------------------------------------------------------------------------" | tee -a "$LOG_PATH"
echo "$(timestamp): restic-backup.sh started" | tee -a "$LOG_PATH"
# Run Backups
# NOTE(review): left unquoted in case the variable deliberately holds several
# space-separated paths -- confirm.
restic backup $seafile_data_local | tee -a "$LOG_PATH"
# Remove snapshots according to policy
# If run cron more frequently, might add --keep-hourly 24
restic forget --keep-daily 7 --keep-weekly 4 --keep-monthly 12 --keep-yearly 7 | tee -a "$LOG_PATH"
# Remove unneeded data from the repository
restic prune | tee -a "$LOG_PATH"
# Check the repository for errors
restic check | tee -a "$LOG_PATH"
# insert timestamp into log
printf "\n\n"
echo "-------------------------------------------------------------------------------" | tee -a "$LOG_PATH"
echo "$(timestamp): restic-backup.sh finished" | tee -a "$LOG_PATH"

View File

@ -1,141 +1,141 @@
import os
from datetime import datetime
from dotenv import load_dotenv
from AppriseClient import apprise_notify
from HealthchecksIO import healthcheck_ping
import requests as r
import subprocess
now = datetime.now()
r.packages.urllib3.disable_warnings()
load_dotenv()
def to_bool(value):
"""
Converts 'something' to boolean. Raises exception for invalid formats
Possible True values: 1, True, "1", "TRue", "yes", "y", "t"
Possible False values: 0, False, "0", "faLse", "no", "n", "f"
"""
if str(value).lower() in ("yes", "y", "true", "t", "1"): return True
if str(value).lower() in ("no", "n", "false", "f", "0"): return False
raise Exception('Invalid value for boolean conversion: ' + str(value) + \
f'\nPossible True values: 1, True, "1", "TRue", "yes", "y", "t"\
\nPossible False values: 0, False, "0", "faLse", "no", "n", "f"')
# switches
docker_command = to_bool(os.getenv("docker_command"))
rclone_copy = to_bool(os.getenv("rclone_copy"))
rclone_config_create = to_bool(os.getenv("rclone_config_create"))
rclone_push = to_bool(os.getenv("rclone_push"))
restic_push = to_bool(os.getenv("restic_push"))
db_dump = to_bool(os.getenv("db_dump"))
zip_db_files = to_bool(os.getenv("zip_db_files"))
offload_db_files = to_bool(os.getenv("offload_db_files"))
cleanup = to_bool(os.getenv("cleanup"))
healthcheck = to_bool(os.getenv("healthcheck"))
notify = to_bool(os.getenv("notify"))
LOG_PATH = os.getenv("LOG_PATH")
# docker
container_name = os.getenv("container_name")
# data folders
seafile_data_local = os.getenv("seafile_data_local")
seafile_data_backup = os.getenv("seafile_data_backup")
# databases
databases = os.getenv("databases")
db_dump_host = os.getenv("db_dump_host")
db_dump_user = os.getenv("db_dump_user")
db_dump_password = os.getenv("db_dump_password")
db_dump_tmp_path = os.getenv("db_dump_tmp_path")
# Rclone remote
rclone_config_path = os.getenv("rclone_config_path")
rclone_remote = os.getenv("rclone_remote")
rclone_backend = os.getenv("rclone_backend")
rclone_provider = os.getenv("rclone_provider")
rclone_endpoint = os.getenv("rclone_endpoint")
rclone_remote_path = os.getenv("rclone_remote_path")
rclone_remote_db_path = os.getenv("rclone_remote_db_path")
rclone_environment_auth = os.getenv("rclone_environment_auth")
rclone_db_retention = os.getenv("rclone_db_retention")
# Restic remote
RESTIC_REPOSITORY = os.getenv("RESTIC_REPOSITORY")
AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY")
RESTIC_PASSWORD = os.getenv("RESTIC_PASSWORD")
# healthchecks
healthcheck_url = os.getenv("healthcheck_url")
# notify
apprise_apprise_url = os.getenv("apprise_apprise_url")
apprise_aurls = os.getenv("apprise_aurls")
apprise_title = os.getenv("apprise_title")
apprise_body = os.getenv("apprise_body")
# healthcheck - Tell healthchecks.io we are starting the backup
if healthcheck:
healthcheck_ping(r, healthcheck_url, start=True)
# Stop seafile and seafile hub
if docker_command:
os.system(f'docker exec {container_name} /opt/seafile/seafile-serverlatest/seahub.sh stop')
os.system(f'docker exec {container_name} /opt/seafile/seafile-server-latest/seafile.sh stop')
# Dump the databases
if db_dump:
if not os.path.exists(db_dump_tmp_path):
os.system(f"mkdir -p {db_dump_tmp_path}")
for database in databases.split(','):
os.system(f'mariadb-dump -h {db_dump_host} -u {db_dump_user} -p{db_dump_password} --skip-opt\
{database} > {db_dump_tmp_path}{database}.{now.strftime("%m-%d-%Y_%H-%M-%S")}.sql')
# Create the config
if rclone_config_create:
rclone_config_check = str(subprocess.check_output(["rclone", "config", "file"]))
if "doesn't exist" in rclone_config_check:
os.system(f"rclone config create {rclone_remote} {rclone_backend} provider={rclone_provider}\
endpoint={rclone_endpoint} env_auth=true")
# Local rclone backup
if rclone_copy:
os.system(f'rclone sync -P {seafile_data_local} {seafile_data_backup}')
# Remote rclone backup
if rclone_push:
os.system(f'rclone sync -P {seafile_data_local} {rclone_remote}:{rclone_remote_path}')
# Remote restic backup
if restic_push:
os.system("/opt/restic.sh")
# Start seafile and seafile hub
if docker_command:
os.system(f'docker exec {container_name} /opt/seafile/seafile-server-latest/seafile.sh start')
os.system(f'docker exec {container_name} /opt/seafile/seafile-server-latest/seahub.sh start')
# compress db files
if zip_db_files:
os.system(f'zip -r {db_dump_tmp_path}/sfdb_{now.strftime("%m-%d-%Y_%H-%M-%S")} {db_dump_tmp_path}')
os.system(f'rm {db_dump_tmp_path}*.sql')
# offload db file
if offload_db_files:
os.system(f'rclone copy -P {db_dump_tmp_path} {rclone_remote}:{rclone_remote_db_path}')
# cleanup
if cleanup:
os.system(f'rm {db_dump_tmp_path}*sfdb_*')
os.system(f'rclone delete -P {rclone_db_retention} {rclone_remote}:{rclone_remote_db_path}')
# healthcheck
if healthcheck:
healthcheck_ping(r, healthcheck_url)
# notification
if notify:
import os
from datetime import datetime
from dotenv import load_dotenv
from AppriseClient import apprise_notify
from HealthchecksIO import healthcheck_ping
import requests as r
import subprocess
# One timestamp for the whole run so every dump/archive shares a single name.
now = datetime.now()
# Warnings are silenced because downstream calls use verify=False.
r.packages.urllib3.disable_warnings()
# Pull configuration from a .env file into the process environment.
load_dotenv()
def to_bool(value):
    """
    Converts 'something' to boolean. Raises ValueError for invalid formats
    Possible True values: 1, True, "1", "TRue", "yes", "y", "t"
    Possible False values: 0, False, "0", "faLse", "no", "n", "f"
    """
    normalized = str(value).lower()
    if normalized in ("yes", "y", "true", "t", "1"):
        return True
    if normalized in ("no", "n", "false", "f", "0"):
        return False
    # ValueError is the conventional type for a bad literal; as a subclass of
    # Exception, existing broad handlers still catch it.
    raise ValueError(
        'Invalid value for boolean conversion: ' + str(value) +
        '\nPossible True values: 1, True, "1", "TRue", "yes", "y", "t"'
        '\nPossible False values: 0, False, "0", "faLse", "no", "n", "f"'
    )
# ---------------------------------------------------------------------------
# Runtime configuration.  Every value comes from the process environment
# (populated by load_dotenv() above).  The to_bool() switches raise on
# anything not clearly truthy/falsy, so an unset switch fails fast instead of
# silently defaulting; os.getenv() values below are None when unset.
# ---------------------------------------------------------------------------
# switches
docker_command = to_bool(os.getenv("docker_command"))
rclone_copy = to_bool(os.getenv("rclone_copy"))
rclone_config_create = to_bool(os.getenv("rclone_config_create"))
rclone_push = to_bool(os.getenv("rclone_push"))
restic_push = to_bool(os.getenv("restic_push"))
db_dump = to_bool(os.getenv("db_dump"))
zip_db_files = to_bool(os.getenv("zip_db_files"))
offload_db_files = to_bool(os.getenv("offload_db_files"))
cleanup = to_bool(os.getenv("cleanup"))
healthcheck = to_bool(os.getenv("healthcheck"))
notify = to_bool(os.getenv("notify"))
LOG_PATH = os.getenv("LOG_PATH")
# docker
container_name = os.getenv("container_name")
# data folders
seafile_data_local = os.getenv("seafile_data_local")
seafile_data_backup = os.getenv("seafile_data_backup")
# databases
# Comma-separated list of database names (split at the dump step below).
databases = os.getenv("databases")
db_dump_host = os.getenv("db_dump_host")
db_dump_user = os.getenv("db_dump_user")
db_dump_password = os.getenv("db_dump_password")
db_dump_tmp_path = os.getenv("db_dump_tmp_path")
# Rclone remote
rclone_config_path = os.getenv("rclone_config_path")
rclone_remote = os.getenv("rclone_remote")
rclone_backend = os.getenv("rclone_backend")
rclone_provider = os.getenv("rclone_provider")
rclone_endpoint = os.getenv("rclone_endpoint")
rclone_remote_path = os.getenv("rclone_remote_path")
rclone_remote_db_path = os.getenv("rclone_remote_db_path")
rclone_environment_auth = os.getenv("rclone_environment_auth")
rclone_db_retention = os.getenv("rclone_db_retention")
# Restic remote
# These are read from the environment by restic/the AWS SDK in restic.sh;
# they are bound here only for completeness.
RESTIC_REPOSITORY = os.getenv("RESTIC_REPOSITORY")
AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY")
RESTIC_PASSWORD = os.getenv("RESTIC_PASSWORD")
# healthchecks
healthcheck_url = os.getenv("healthcheck_url")
# notify
apprise_apprise_url = os.getenv("apprise_apprise_url")
apprise_aurls = os.getenv("apprise_aurls")
apprise_title = os.getenv("apprise_title")
apprise_body = os.getenv("apprise_body")
# healthcheck - Tell healthchecks.io we are starting the backup
if healthcheck:
    # NOTE(review): this passes start=True, but the revised HealthchecksIO
    # module defines healthcheck_ping(req_obj, url) with no start parameter --
    # as written this call raises TypeError unless the parameter is restored
    # in HealthchecksIO; verify.
    healthcheck_ping(r, healthcheck_url, start=True)
# Stop seahub (web UI) then the seafile server so the data directory is
# quiescent while it is backed up.
if docker_command:
    # Fixed path: was "/opt/seafile/seafile-serverlatest/..." (missing
    # hyphen, unlike every other seafile-server-latest path in this file),
    # so the seahub stop command could never resolve.
    os.system(f'docker exec {container_name} /opt/seafile/seafile-server-latest/seahub.sh stop')
    os.system(f'docker exec {container_name} /opt/seafile/seafile-server-latest/seafile.sh stop')
# Dump the databases
# NOTE(review): env-derived values are interpolated straight into shell
# commands via os.system throughout this section -- a shell-injection hazard
# if the .env file is attacker-writable; consider subprocess.run([...]).
if db_dump:
    if not os.path.exists(db_dump_tmp_path):
        os.system(f"mkdir -p {db_dump_tmp_path}")
    for database in databases.split(','):
        # NOTE(review): -p{password} exposes the password in the process
        # list; a defaults file would be safer.  The trailing backslash
        # continues the string literal, so the continuation line's leading
        # space separates --skip-opt from the database name -- confirm the
        # original indentation preserved that space.
        os.system(f'mariadb-dump -h {db_dump_host} -u {db_dump_user} -p{db_dump_password} --skip-opt\
 {database} > {db_dump_tmp_path}{database}.{now.strftime("%m-%d-%Y_%H-%M-%S")}.sql')
# Create the config
if rclone_config_create:
    # `rclone config file` prints the config path, or a "doesn't exist"
    # message when none has been created yet.
    rclone_config_check = str(subprocess.check_output(["rclone", "config", "file"]))
    if "doesn't exist" in rclone_config_check:
        os.system(f"rclone config create {rclone_remote} {rclone_backend} provider={rclone_provider}\
 endpoint={rclone_endpoint} env_auth=true")
# Local rclone backup
if rclone_copy:
    os.system(f'rclone sync -P {seafile_data_local} {seafile_data_backup}')
# Remote rclone backup
if rclone_push:
    os.system(f'rclone sync -P {seafile_data_local} {rclone_remote}:{rclone_remote_path}')
# Remote restic backup
if restic_push:
    os.system("/opt/restic.sh")
# Start seafile and seafile hub
if docker_command:
    os.system(f'docker exec {container_name} /opt/seafile/seafile-server-latest/seafile.sh start')
    os.system(f'docker exec {container_name} /opt/seafile/seafile-server-latest/seahub.sh start')
# compress db files
if zip_db_files:
    os.system(f'zip -r {db_dump_tmp_path}/sfdb_{now.strftime("%m-%d-%Y_%H-%M-%S")} {db_dump_tmp_path}')
    os.system(f'rm {db_dump_tmp_path}*.sql')
# offload db file
if offload_db_files:
    os.system(f'rclone copy -P {db_dump_tmp_path} {rclone_remote}:{rclone_remote_db_path}')
# cleanup
if cleanup:
    os.system(f'rm {db_dump_tmp_path}*sfdb_*')
    # rclone_db_retention presumably holds a flag like "--min-age 30d" that
    # limits what gets deleted remotely -- verify against the .env.
    os.system(f'rclone delete -P {rclone_db_retention} {rclone_remote}:{rclone_remote_db_path}')
# healthcheck
if healthcheck:
    # Success ping: no start flag, so this marks the run complete.
    healthcheck_ping(r, healthcheck_url)
# notification
if notify:
    apprise_notify(r, apprise_apprise_url, apprise_aurls, apprise_title, apprise_body)