This guide walks you through setting up a fully automated Docker backup system on your OpenMediaVault NAS and restoring it after a fresh install. You'll also learn how to reinstall Docker and Portainer from scratch.
🧠 Overview
We'll use systemd timers to automate a script that dumps your container databases, backs up the key Docker folders (containers, volumes, network, swarm, and config), and compresses everything into an archive.
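The scripts below create their directories automatically, but if you'd like to lay out the structure up front, these are the paths used throughout this guide:
sudo mkdir -p /srv/personal/backups/{docker-backups,docker-logs,docker-archives}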
Optional Step
📢 I use this Telegram script to get instant alerts from my server. It's lightweight, fast, and keeps me in the loop 24/7. Highly recommended for any automation or backup setup! 💡
🔗Level Up Your Telegram Notifications: Secure Credentials with a .env File
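If you don't have a notifier yet, here is a minimal sketch of what /usr/local/bin/telegram_notify.sh can look like. The .env path and variable names here are assumptions; the linked post covers the full, secure setup:
#!/bin/bash
# Minimal Telegram notifier sketch — adapt to your own setup.
# Assumes TELEGRAM_BOT_TOKEN and TELEGRAM_CHAT_ID are defined in the .env file below.
source /usr/local/etc/telegram.env
curl -s -X POST "https://api.telegram.org/bot${TELEGRAM_BOT_TOKEN}/sendMessage" \
  -d chat_id="${TELEGRAM_CHAT_ID}" \
  -d text="$1" > /dev/null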
🛠 Step 1: Create the Backup Script
sudo nano /usr/local/bin/docker_backup.sh
#!/bin/bash
# === CONFIGURATION ===
TIMESTAMP=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_DIR="/srv/personal/backups/docker-backups"
LOG_DIR="/srv/personal/backups/docker-logs"
LOG_FILE="$LOG_DIR/backup_$TIMESTAMP.log"
TELEGRAM_SCRIPT="/usr/local/bin/telegram_notify.sh"
# DB container settings
NEXTCLOUD_DB_CONTAINER="nextcloud_db"
NEXTCLOUD_DB_NAME="nextcloud"
NEXTCLOUD_DB_USER="root"
NEXTCLOUD_DB_PASS="your_user_password"
NEXTCLOUD_DB_DUMP_FILE="$BACKUP_DIR/nextcloud-db_$TIMESTAMP.sql"
IMMICH_DB_CONTAINER="immich_postgres"
IMMICH_DB_NAME="immich"
IMMICH_DB_DUMP_FILE="$BACKUP_DIR/immich-db_$TIMESTAMP.sql"
WEBTREES_DB_CONTAINER="webtrees-db"
WEBTREES_DB_NAME="webtrees"
WEBTREES_DB_USER="webtrees_user"
WEBTREES_DB_PASS="your_user_password"
WEBTREES_DB_DUMP_FILE="$BACKUP_DIR/webtrees-db_$TIMESTAMP.sql"
# Docker folders
DOCKER_PATH="/var/lib/docker"
DOCKER_SUBDIRS=("containers" "volumes" "network" "swarm" "config")
DOCKER_BACKUP_PATH="$BACKUP_DIR/docker"
# === FUNCTIONS ===
log() {
    echo "[$(date +"%Y-%m-%d %H:%M:%S")] $1" | tee -a "$LOG_FILE"
}
send_telegram() {
    [ -x "$TELEGRAM_SCRIPT" ] && "$TELEGRAM_SCRIPT" "$1"
}
# === START ===
mkdir -p "$BACKUP_DIR" "$LOG_DIR"
log "[INFO] Starting NAS backup..."
send_telegram "🚀 NAS Backup started at $TIMESTAMP"
# === NEXTCLOUD ===
log "[INFO] Dumping Nextcloud DB..."
docker exec "$NEXTCLOUD_DB_CONTAINER" sh -c "exec mysqldump -u$NEXTCLOUD_DB_USER -p$NEXTCLOUD_DB_PASS $NEXTCLOUD_DB_NAME" > "$NEXTCLOUD_DB_DUMP_FILE"
if [ $? -eq 0 ]; then
    log "[SUCCESS] Nextcloud DB saved: $NEXTCLOUD_DB_DUMP_FILE"
    send_telegram "✅ Nextcloud DB backup done: $(basename "$NEXTCLOUD_DB_DUMP_FILE")"
    ls -1t "$BACKUP_DIR"/nextcloud-db_*.sql | tail -n +2 | xargs -r rm -f
else
    log "[ERROR] Nextcloud DB dump failed!"
    send_telegram "❌ Nextcloud DB backup failed!"
    exit 1
fi
# === IMMICH ===
log "[INFO] Dumping Immich DB..."
docker exec -t "$IMMICH_DB_CONTAINER" pg_dump -U postgres "$IMMICH_DB_NAME" > "$IMMICH_DB_DUMP_FILE"
if [ $? -eq 0 ]; then
    log "[SUCCESS] Immich DB saved: $IMMICH_DB_DUMP_FILE"
    send_telegram "✅ Immich DB backup done: $(basename "$IMMICH_DB_DUMP_FILE")"
    ls -1t "$BACKUP_DIR"/immich-db_*.sql | tail -n +2 | xargs -r rm -f
else
    log "[ERROR] Immich DB dump failed!"
    send_telegram "❌ Immich DB backup failed!"
    exit 1
fi
# === WEBTREES ===
log "[INFO] Dumping Webtrees DB..."
docker exec "$WEBTREES_DB_CONTAINER" sh -c "exec mysqldump -u$WEBTREES_DB_USER -p$WEBTREES_DB_PASS $WEBTREES_DB_NAME" > "$WEBTREES_DB_DUMP_FILE"
if [ $? -eq 0 ]; then
    log "[SUCCESS] Webtrees DB saved: $WEBTREES_DB_DUMP_FILE"
    send_telegram "✅ Webtrees DB backup done: $(basename "$WEBTREES_DB_DUMP_FILE")"
    ls -1t "$BACKUP_DIR"/webtrees-db_*.sql | tail -n +2 | xargs -r rm -f
else
    log "[ERROR] Webtrees DB dump failed!"
    send_telegram "❌ Webtrees DB backup failed!"
    exit 1
fi
# === BACKUP DOCKER CONFIGS ===
log "[INFO] Stopping Docker..."
systemctl stop docker || { log "[ERROR] Docker stop failed!"; send_telegram "❌ Docker stop failed!"; exit 1; }
log "[INFO] Backing up Docker config folders..."
mkdir -p "$DOCKER_BACKUP_PATH"
for dir in "${DOCKER_SUBDIRS[@]}"; do
    rsync -aHAX --delete "$DOCKER_PATH/$dir" "$DOCKER_BACKUP_PATH"
done
log "[INFO] Starting Docker..."
systemctl start docker || { log "[ERROR] Docker start failed!"; send_telegram "❌ Docker start failed!"; exit 1; }
# === CLEAN OLD LOG FILES ===
find "$LOG_DIR" -type f -name "backup_*.log" -mtime +7 -exec rm -f {} \;
log "[SUCCESS] NAS backup completed."
send_telegram "✅ NAS Backup completed successfully at $(date +"%Y-%m-%d %H:%M:%S")"
sudo chmod +x /usr/local/bin/docker_backup.sh
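Before automating anything, it's worth running the script once by hand (note that it briefly stops Docker) and checking that the dumps and folders land where you expect:
sudo /usr/local/bin/docker_backup.sh
ls -lh /srv/personal/backups/docker-backups/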
🗜 Step 2: Create the Compression Script
sudo nano /usr/local/bin/docker_compress.sh
#!/bin/bash
# === CONFIGURATION ===
SRC_DIR="/srv/personal/backups/docker-backups"
ARCHIVE_DIR="/srv/personal/backups/docker-archives"
TIMESTAMP=$(date +"%Y-%m-%d_%H-%M-%S")
ARCHIVE_FILE="$ARCHIVE_DIR/docker-backup-$TIMESTAMP.tar.gz"
TEMP_ARCHIVE_FILE="$ARCHIVE_DIR/docker-backup-temp.tar.gz"
# External Telegram script
TELEGRAM_SCRIPT="/usr/local/bin/telegram_notify.sh"
# === Create archive directory if not exists ===
mkdir -p "$ARCHIVE_DIR"
# === Function to send Telegram message ===
send_telegram() {
    local message="$1"
    if [ -x "$TELEGRAM_SCRIPT" ]; then
        "$TELEGRAM_SCRIPT" "$message"
    fi
}
send_telegram "📦 [INFO] Starting Docker backup compression..."
# === Compress backup to temporary archive ===
echo "[INFO] Compressing backup to temporary archive..."
tar -czf "$TEMP_ARCHIVE_FILE" -C "$SRC_DIR" .
if [ $? -eq 0 ]; then
    mv "$TEMP_ARCHIVE_FILE" "$ARCHIVE_FILE"
    echo "[SUCCESS] Backup compressed to: $ARCHIVE_FILE"
    send_telegram "✅ [SUCCESS] Backup compressed: $(basename "$ARCHIVE_FILE")"
else
    echo "[ERROR] Compression failed!"
    send_telegram "❌ [ERROR] Docker backup compression failed!"
    exit 1
fi
# === Safely delete old archives, keep only the latest ===
echo "[INFO] Cleaning up old archives..."
ls -1tr "$ARCHIVE_DIR"/docker-backup-*.tar.gz | head -n -1 | xargs -r rm -f
echo "[INFO] Old archives deleted. Only the latest one is retained."
send_telegram "🧹 [INFO] Old archives removed. Only the latest backup is kept."
send_telegram "🎉 [DONE] Docker backup archive process completed successfully."
sudo chmod +x /usr/local/bin/docker_compress.sh
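You can test the compression step the same way and peek inside the resulting archive. Since the script keeps only the latest archive, the glob below should match a single file:
sudo /usr/local/bin/docker_compress.sh
tar -tzf /srv/personal/backups/docker-archives/docker-backup-*.tar.gz | head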
🛠️ Step-by-Step: Upload Backup Archive to Google Drive Using Rclone
1. Install Rclone
Begin by installing Rclone on your OpenMediaVault system:
curl https://rclone.org/install.sh | sudo bash
2. Configure Rclone for Google Drive
For detailed instructions on setting up Google Drive with Rclone, refer to the official guide here: How to Set Up Google Drive on Rclone.
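Once configured, a quick sanity check confirms the remote works. This assumes you named the remote gdrive, matching the upload script below:
rclone lsd gdrive: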
3. Add the Rclone Upload Script
Create the upload script:
sudo nano /usr/local/bin/docker_upload_to_gdrive.sh
Paste in the following script to upload your Docker backup archive to Google Drive:
#!/bin/bash
# === CONFIGURATION ===
ARCHIVE_DIR="/srv/personal/backups/docker-archives"
RCLONE_REMOTE="gdrive"
RCLONE_FOLDER="docker-backups"
TELEGRAM_SCRIPT="/usr/local/bin/telegram_notify.sh"
# === FIND LATEST BACKUP FILE ===
LATEST_BACKUP=$(ls -t "$ARCHIVE_DIR"/docker-backup-*.tar.gz 2>/dev/null | head -n 1)
if [ -z "$LATEST_BACKUP" ]; then
    echo "[ERROR] No backup archive found in $ARCHIVE_DIR"
    if [ -x "$TELEGRAM_SCRIPT" ]; then
        "$TELEGRAM_SCRIPT" "❌ No Docker backup archive found to upload to Google Drive!"
    fi
    exit 1
fi
# === FUNCTION: SEND TELEGRAM MESSAGE ===
send_telegram() {
    if [ -x "$TELEGRAM_SCRIPT" ]; then
        "$TELEGRAM_SCRIPT" "$1"
    fi
}
# === UPLOAD TO GOOGLE DRIVE ===
echo "[INFO] Uploading $(basename "$LATEST_BACKUP") to Google Drive..."
rclone copy "$LATEST_BACKUP" "$RCLONE_REMOTE:$RCLONE_FOLDER"
if [ $? -eq 0 ]; then
    echo "[SUCCESS] Backup uploaded successfully to Google Drive."
    send_telegram "✅ Docker backup uploaded: $(basename "$LATEST_BACKUP")"
    # === DELETE OLD BACKUPS AFTER SUCCESSFUL UPLOAD ===
    echo "[INFO] Looking for older backups to delete..."
    OLD_BACKUPS=$(rclone lsf "$RCLONE_REMOTE:$RCLONE_FOLDER" --files-only | grep '^docker-backup-.*\.tar\.gz$' | grep -v "$(basename "$LATEST_BACKUP")")
    if [ -n "$OLD_BACKUPS" ]; then
        while IFS= read -r file; do
            echo "[INFO] Deleting old backup: $file"
            rclone deletefile "$RCLONE_REMOTE:$RCLONE_FOLDER/$file"
        done <<< "$OLD_BACKUPS"
        echo "[INFO] Old backups deleted."
    else
        echo "[INFO] No old backups found to delete."
    fi
else
    echo "[ERROR] Failed to upload backup to Google Drive!"
    send_telegram "❌ Docker backup upload to Google Drive failed!"
    exit 1
fi
🔐 Permissions
Make the script executable:
sudo chmod +x /usr/local/bin/docker_upload_to_gdrive.sh
This script will find the latest Docker backup archive and upload it to your Google Drive, notifying you via Telegram about the success or failure of the operation.
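A quick manual run confirms the upload works end to end. The second command assumes the gdrive remote and docker-backups folder from the script's configuration:
sudo /usr/local/bin/docker_upload_to_gdrive.sh
rclone ls gdrive:docker-backups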
⏰ Step 3: Automate It with systemd
1. Create the Service
sudo nano /etc/systemd/system/docker-backup.service
[Unit]
Description=Docker Backup Script
After=network-online.target docker.service
Wants=network-online.target
[Service]
Type=oneshot
# Wait 5 minutes so Docker and network mounts are fully up (matters when the Persistent timer fires right after boot)
ExecStartPre=/bin/sleep 300
ExecStart=/usr/local/bin/docker_backup.sh
ExecStartPost=/usr/local/bin/docker_compress.sh
ExecStartPost=/usr/local/bin/docker_upload_to_gdrive.sh
2. Create the Timer
sudo nano /etc/systemd/system/docker-backup.timer
[Unit]
Description=Run Docker Backup Daily
[Timer]
OnCalendar=*-*-* 03:05:00
Persistent=true
[Install]
WantedBy=timers.target
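If you want to double-check when the OnCalendar expression will fire, systemd can evaluate it for you:
systemd-analyze calendar "*-*-* 03:05:00"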
3. Reload & Enable
sudo systemctl daemon-reload
sudo systemctl enable --now docker-backup.timer
🧪 Test It
sudo systemctl start docker-backup.service
systemctl list-timers | grep docker-backup
ls -lh /srv/personal/backups/docker-archives/
journalctl -u docker-backup.service --no-pager --since "5 minutes ago"
📁 Backup Structure
Here's how the backup directories are organized:
/srv/personal/backups/docker-backups/
├── docker/
│   ├── containers/
│   ├── volumes/
│   ├── network/
│   ├── swarm/
│   └── config/
├── nextcloud-db_YYYY-MM-DD_HH-MM-SS.sql
├── immich-db_YYYY-MM-DD_HH-MM-SS.sql
└── webtrees-db_YYYY-MM-DD_HH-MM-SS.sql
Compressed archives are stored separately for easy retrieval:
/srv/personal/backups/docker-archives/
└── docker-backup-YYYY-MM-DD_HH-MM-SS.tar.gz