diff --git a/.gitignore b/.gitignore
index 4e8cfc33..43407802 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,7 +11,8 @@
 docker-compose.override.yml
 docker-compose.yml
 Dockerfile
-/backups/*.sql
+/backups/
+!/backups/.gitkeep
 /public/media/cache/*
 !/public/images/.gitkeep
 !/public/images/librairies
diff --git a/doc/backup.md b/doc/backup.md
new file mode 100644
index 00000000..7fad70ce
--- /dev/null
+++ b/doc/backup.md
@@ -0,0 +1,23 @@
+You can create backups of your project in the `backups` folder.
+
+# Backup with rclone
+- Install `rclone` and `gzip` on your system
+- Create an rclone config file at `~/.config/rclone/rclone.conf`
+- Copy the `backup.sh.example` script into `backups/backup.sh`
+- Make the script executable with `chmod +x backups/backup.sh`
+- Edit the script to match your configuration
+- Set up a cron job to run it periodically
+
+## Example of rclone.conf for exoscale
+
+```
+[biblioteca-backup]
+type = s3
+provider = Other
+env_auth = false
+access_key_id = ...
+secret_access_key = ...
+region = ch-ge-2
+endpoint = sos-ch-ge-2.exo.io
+acl = private
+```
diff --git a/doc/backup.sh.example b/doc/backup.sh.example
new file mode 100644
index 00000000..b120490a
--- /dev/null
+++ b/doc/backup.sh.example
@@ -0,0 +1,64 @@
+#!/bin/bash
+#
+# Back up the Biblioteca database and media files to an S3-compatible
+# bucket via rclone, then prune remote dumps down to the newest $KEEP.
+#
+# Requirements: bash, gzip, rclone (with a configured remote), docker compose.
+# BUCKET and REMOTE may be overridden via the environment.
+set -euo pipefail
+
+SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" &> /dev/null && pwd)
+
+BUCKET=${BUCKET:-biblioteca-backup}
+REMOTE=${REMOTE:-biblioteca-backup}
+
+if ! command -v gzip >/dev/null; then
+  echo "gzip is required to run this script" >&2
+  exit 1
+fi
+
+if ! command -v rclone >/dev/null; then
+  echo "rclone is required to run this script" >&2
+  exit 1
+fi
+
+BOOKS_DIR=$SCRIPT_DIR/../public/books
+COVER_DIR=$SCRIPT_DIR/../public/covers
+mkdir -p "$BOOKS_DIR" "$COVER_DIR"
+
+# The dump is written to the project root (one level above this script).
+filename=$(date +"dump-%Y%m%d-%H%M%S.sql.gz")
+DUMP_LOCAL=$(realpath "$SCRIPT_DIR/..")/$filename
+
+# Generate the dump (skipped if one with the same timestamp already exists)
+if [ ! -f "$DUMP_LOCAL" ]; then
+  # Start the SQL server
+  docker compose up -d --remove-orphans
+  # Create the dump; credentials come from the container's environment
+  docker compose exec db bash -c 'mysqldump -h localhost -u "${MYSQL_USER}" -p"${MYSQL_PASSWORD}" "${MYSQL_DATABASE}"' | gzip > "$DUMP_LOCAL"
+fi
+
+# Sync files
+rclone sync -P "$BOOKS_DIR" "$REMOTE:$BUCKET/biblioteca/books"
+rclone sync -P "$COVER_DIR" "$REMOTE:$BUCKET/biblioteca/covers"
+
+# Backup the dump
+rclone copyto "$DUMP_LOCAL" "$REMOTE:$BUCKET/biblioteca/data/$filename"
+rm -f "$DUMP_LOCAL"
+
+# Backup the script
+rclone copyto "$0" "$REMOTE:$BUCKET/biblioteca/$(basename "$0")"
+
+# Remove old dumps, keeping the $KEEP most recent. Dump filenames embed their
+# timestamp (dump-YYYYmmdd-HHMMSS), so a reverse lexical sort on the path
+# alone orders them newest first.
+KEEP=30
+mapfile -t FILE_ARRAY < <(rclone lsf --format p --absolute --files-only --max-depth 1 "${REMOTE}:${BUCKET}/biblioteca/data/" | sort -r)
+FILE_COUNT=${#FILE_ARRAY[@]}
+DELETE_COUNT=$(( FILE_COUNT - KEEP ))
+if [ "$DELETE_COUNT" -gt 0 ]; then
+  echo "Deleting $DELETE_COUNT old files..."
+  for (( i = KEEP; i < FILE_COUNT; i++ )); do
+    rclone delete "${REMOTE}:${BUCKET}/biblioteca/data${FILE_ARRAY[$i]}"
+  done
+fi