Backups
Nextcloud
The backup is exported with the Snap's export command to the default,
non-configurable directory /var/snap/nextcloud/common/backups. That directory
is symlinked to a directory on the large-capacity disks, /mnt/data/backup.
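A minimal sketch of how that symlink can be put in place, assuming the Snap is stopped during the move; the target subdirectory name nextcloud-exports under /mnt/data/backup is an assumption used only for illustration:

# Stop the Nextcloud snap while relocating the export directory
sudo snap stop nextcloud

# Move existing exports onto the large-capacity disk (target subdirectory name is hypothetical)
sudo mv /var/snap/nextcloud/common/backups /mnt/data/backup/nextcloud-exports

# Recreate the default path as a symlink pointing at the relocated directory
sudo ln -s /mnt/data/backup/nextcloud-exports /var/snap/nextcloud/common/backups

sudo snap start nextcloud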
The resulting file is compressed and sent by rsync to the Synology NAS FGM-ARBRE. For that purpose, an rsync user was created on the NAS with rsync access rights and its own backup directory on Arbre.
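A sketch of how that key-based access can be prepared; the key path, user, host and destination match the script further down, while enabling SSH and rsync for the account is done in DSM and is not shown here:

# Generate a dedicated key pair on the Nextcloud server for the rsync account
ssh-keygen -t ed25519 -f ~/.ssh/rsync_key -N ""

# Install the public key on the NAS (one-time password prompt)
ssh-copy-id -i ~/.ssh/rsync_key.pub rsync@192.168.1.41

# Dry-run transfer to confirm key-based rsync access to the backup share
rsync -a --dry-run -e "ssh -i ~/.ssh/rsync_key" /etc/hostname rsync@192.168.1.41:/volume1/NextcloudBackUp/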
The Nextcloud data directory /data is synchronized directly with its own
directory in the Arbre backup share.
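Since that sync runs with --delete, a dry run (a suggested check, not part of the scheduled job) shows what would be removed on the NAS before anything is touched; the paths and credentials are those used in the script below:

# Preview the data sync; -n (--dry-run) reports changes without applying them
rsync -a -n -v --delete -e "ssh -i ~/.ssh/rsync_key" \
    /mnt/ncdata/data/ rsync@192.168.1.41:/volume1/NextcloudBackUp/data/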
Logs are saved in /var/log/.
The intermediate directories are emptied before each run; the number of log files is limited, as is the number of compressed Nextcloud archives (database, settings, ...) kept on the NAS.
#!/bin/bash

# Variables
NEXTCLOUD_EXPORT_DIR="/var/snap/nextcloud/common/backups"             # Default Nextcloud Snap export directory
LOCAL_TEMP_DIR="/mnt/data/backup/temp"                                # Temporary local storage
NAS_USER="rsync"                                                      # Synology NAS username
NAS_HOST="192.168.1.41"                                               # Synology NAS hostname or IP
SRV_CERTIFICATE="~/.ssh/rsync_key"                                    # SSH private key used for rsync/ssh to the NAS
NAS_BACKUP_DIR="/volume1/NextcloudBackUp"                             # Destination path on Synology NAS
DATE=$(date +'%Y-%m-%d_%H-%M-%S')                                     # Timestamp for backup
LOG_DIR="/var/log"                                                    # Log directory
LOG_FILE="$LOG_DIR/nextcloud_backup_$DATE.log"                        # New log file with date-based name
MAX_BACKUPS=5                                                         # Maximum number of backups to keep on NAS
CONFIG_DB_ARCHIVE="$LOCAL_TEMP_DIR/nextcloud_config_db_$DATE.tar.gz"  # Compressed export for config and DB
DATA_DIR="/mnt/ncdata/data"                                           # Nextcloud data directory

# Ensure local temporary storage directory exists
mkdir -p "$LOCAL_TEMP_DIR"

# Clean up old log files to keep only the latest 10
echo "[$(date)] Cleaning up old log files..." | tee -a "$LOG_FILE"
ls -t "$LOG_DIR"/nextcloud_backup*.log 2>/dev/null | tail -n +11 | xargs -r rm -f
if [ $? -ne 0 ]; then
    echo "[$(date)] ERROR: Failed to clean up old log files." | tee -a "$LOG_FILE"
    exit 1
fi
echo "[$(date)] Old log files cleaned successfully." | tee -a "$LOG_FILE"

# Clean up the Nextcloud export directory and the temporary directory before starting
echo "[$(date)] Cleaning up the Nextcloud export directory..." | tee -a "$LOG_FILE"
sudo rm -rf -- "$NEXTCLOUD_EXPORT_DIR"/*
if [ $? -ne 0 ]; then
    echo "[$(date)] ERROR: Failed to clean up the Nextcloud export directory." | tee -a "$LOG_FILE"
    exit 1
fi
sudo rm -rf -- "$LOCAL_TEMP_DIR"/*
if [ $? -ne 0 ]; then
    echo "[$(date)] ERROR: Failed to clean up the temporary compressed export directory." | tee -a "$LOG_FILE"
    exit 1
fi
echo "[$(date)] Nextcloud export directories cleaned successfully." | tee -a "$LOG_FILE"

# Perform full export of config and database using Snap export (-abc = apps, database, config; data is synced separately)
echo "[$(date)] Starting full export of Nextcloud configuration and database..." | tee -a "$LOG_FILE"
sudo nextcloud.export -abc > /dev/null 2>&1
if [ $? -ne 0 ]; then
    echo "[$(date)] ERROR: Full export of Nextcloud configuration and database failed." | tee -a "$LOG_FILE"
    exit 1
fi
echo "[$(date)] Export completed successfully." | tee -a "$LOG_FILE"

# Compress the exported config and database
echo "[$(date)] Compressing exported configuration and database..." | tee -a "$LOG_FILE"
tar -czf "$CONFIG_DB_ARCHIVE" -C "$NEXTCLOUD_EXPORT_DIR" .
if [ $? -ne 0 ]; then
    echo "[$(date)] ERROR: Compression of configuration and database export failed." | tee -a "$LOG_FILE"
    exit 1
fi

# Transfer the compressed config and database archive to Synology NAS
echo "[$(date)] Transferring compressed configuration and database to Synology NAS..." | tee -a "$LOG_FILE"
rsync -a -e "ssh -i $SRV_CERTIFICATE" "$CONFIG_DB_ARCHIVE" "$NAS_USER@$NAS_HOST:$NAS_BACKUP_DIR/"
if [ $? -ne 0 ]; then
    echo "[$(date)] ERROR: Transfer of configuration and database to Synology NAS failed." | tee -a "$LOG_FILE"
    exit 1
fi

# Sync the data directory directly to Synology NAS
echo "[$(date)] Syncing data directory to Synology NAS..." | tee -a "$LOG_FILE"
rsync -a -e "ssh -i $SRV_CERTIFICATE" --delete "$DATA_DIR/" "$NAS_USER@$NAS_HOST:$NAS_BACKUP_DIR/data/"
if [ $? -ne 0 ]; then
    echo "[$(date)] ERROR: Sync of data directory to Synology NAS failed." | tee -a "$LOG_FILE"
    exit 1
fi

# Remove temporary local files to minimize space usage
echo "[$(date)] Cleaning up local temporary files..." | tee -a "$LOG_FILE"
rm -rf "$CONFIG_DB_ARCHIVE" "$LOCAL_TEMP_DIR"/*
if [ $? -ne 0 ]; then
    echo "[$(date)] ERROR: Failed to clean up local temporary files." | tee -a "$LOG_FILE"
    exit 1
fi

# Rotate backups on the Synology NAS to keep only the latest $MAX_BACKUPS config/DB archives
# (only the dated tar.gz archives are rotated; the data/ mirror is never touched)
echo "[$(date)] Rotating backups on Synology NAS..." | tee -a "$LOG_FILE"
ssh -i "$SRV_CERTIFICATE" "$NAS_USER@$NAS_HOST" \
    "ls -t $NAS_BACKUP_DIR/nextcloud_config_db_*.tar.gz | tail -n +$((MAX_BACKUPS + 1)) | xargs -r rm -f"
if [ $? -ne 0 ]; then
    echo "[$(date)] ERROR: Backup rotation on Synology NAS failed." | tee -a "$LOG_FILE"
    exit 1
fi

echo "[$(date)] Backup process completed successfully." | tee -a "$LOG_FILE"
exit 0
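The script is meant to run unattended; how it is scheduled is not documented here, so the crontab entry below is only an illustration with an assumed script path and time:

# Root crontab (sudo crontab -e): run the Nextcloud backup every night at 03:00
# (script location /usr/local/sbin/nextcloud_backup.sh is an assumption)
0 3 * * * /usr/local/sbin/nextcloud_backup.sh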
Mailu
#!/bin/bash

# Configuration
MAILU_DATA="/path/to/mailu_data"                             # Path to Mailu data
BACKUP_DEST="user@synology:/path/to/synology/backups/mailu"  # Destination on Synology NAS
BACKUP_NAME="mailu_backup_$(date +'%Y%m%d%H%M%S').tar.gz"    # Backup file name with timestamp
KEEP_BACKUPS=5                                               # Number of backups to retain
LOG_FILE="/var/log/mailu_backup.log"                         # Log file location

# Create a backup archive
echo "$(date) - Starting Mailu backup..." >> "$LOG_FILE"
tar -czf "/tmp/$BACKUP_NAME" -C "$MAILU_DATA" . >> "$LOG_FILE" 2>&1
if [ $? -ne 0 ]; then
    echo "$(date) - Backup creation failed!" >> "$LOG_FILE"
    exit 1
fi

# Sync the backup to the NAS and remove the local copy once transferred
rsync -avz --remove-source-files "/tmp/$BACKUP_NAME" "$BACKUP_DEST" >> "$LOG_FILE" 2>&1
if [ $? -ne 0 ]; then
    echo "$(date) - Rsync to NAS failed!" >> "$LOG_FILE"
    exit 1
fi

# Clean up older backups on the NAS, keeping only the latest $KEEP_BACKUPS archives
echo "$(date) - Cleaning up old backups..." >> "$LOG_FILE"
ssh user@synology "cd /path/to/synology/backups/mailu && ls -tp | grep -v '/$' | tail -n +$((KEEP_BACKUPS + 1)) | xargs -d '\n' -r rm --" >> "$LOG_FILE" 2>&1
if [ $? -eq 0 ]; then
    echo "$(date) - Old backups cleaned up successfully." >> "$LOG_FILE"
else
    echo "$(date) - Failed to clean up old backups." >> "$LOG_FILE"
fi

echo "$(date) - Backup completed successfully." >> "$LOG_FILE"
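Because the local archive is removed after transfer (--remove-source-files), an integrity check has to run against the copy on the NAS; a possible spot check, reusing the placeholder host and path from the script above:

# Find the newest Mailu archive on the NAS and verify it is a readable gzip tarball
LATEST=$(ssh user@synology "ls -t /path/to/synology/backups/mailu/mailu_backup_*.tar.gz | head -n 1")
ssh user@synology "tar -tzf '$LATEST' > /dev/null" && echo "Latest Mailu archive looks OK"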