Automatic database backups are essential for any server. With cron jobs, regular backups are easy to set up.

Backup Strategy

The 3-2-1 Rule

3 copies of your data
2 different storage media
1 offsite copy

Backup Types

| Type | Frequency | Retention |
|------|-----------|-----------|
| Daily | Every night | 7 days |
| Weekly | Sunday | 4 weeks |
| Monthly | 1st of the month | 12 months |

MySQL/MariaDB Backup

Simple Backup Script

#!/bin/bash
# /usr/local/bin/backup-mysql.sh

# Configuration
BACKUP_DIR="/backup/mysql"
DATE=$(date +%Y-%m-%d_%H%M%S)
MYSQL_USER="backup"
MYSQL_PASS="secure_password"

# Create the backup directory
mkdir -p "$BACKUP_DIR"

# Dump all databases
mysqldump -u "$MYSQL_USER" -p"$MYSQL_PASS" \
    --all-databases \
    --single-transaction \
    --routines \
    --triggers \
    --events | gzip > "$BACKUP_DIR/all_databases_$DATE.sql.gz"

# Propagate mysqldump's exit code ($? alone would only reflect gzip)
exit "${PIPESTATUS[0]}"

Creating the Backup User

-- Minimal privileges for the backup user
CREATE USER 'backup'@'localhost' IDENTIFIED BY 'secure_password';
GRANT SELECT, SHOW VIEW, RELOAD, REPLICATION CLIENT, EVENT, TRIGGER, LOCK TABLES ON *.* TO 'backup'@'localhost';
FLUSH PRIVILEGES;
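
Before wiring this user into cron, it is worth a quick manual smoke test. Note that on MySQL 8.0.21 and later, mysqldump may also need the PROCESS privilege unless it is called with --no-tablespaces. A hypothetical check:

# Show the grants and attempt a metadata-only dump as the backup user
mysql -u backup -p -e "SHOW GRANTS FOR CURRENT_USER();"
mysqldump -u backup -p --single-transaction --no-tablespaces --no-data mysql > /dev/null \
    && echo "backup user can dump"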

Credentials File (More Secure)

# ~/.my.cnf or /etc/mysql/backup.cnf

[client]
user=backup
password=secure_password

# Script call without the password in plain text on the command line
mysqldump --defaults-extra-file=/etc/mysql/backup.cnf \
    --all-databases > backup.sql
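
The credentials file still contains the password in plain text, so it should be readable only by its owner:

# Restrict access to the credentials file
chown root:root /etc/mysql/backup.cnf
chmod 600 /etc/mysql/backup.cnf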

Individual Databases

#!/bin/bash
# /usr/local/bin/backup-mysql-separate.sh

BACKUP_DIR="/backup/mysql"
DATE=$(date +%Y-%m-%d)
MYSQL_OPTS="--defaults-extra-file=/etc/mysql/backup.cnf"

mkdir -p "$BACKUP_DIR/$DATE"

# List all databases, excluding the information/performance schemas and sys
DATABASES=$(mysql $MYSQL_OPTS -N -e "SHOW DATABASES" | grep -Ev "^(information_schema|performance_schema|sys)$")

for DB in $DATABASES; do
    mysqldump $MYSQL_OPTS --single-transaction "$DB" | gzip > "$BACKUP_DIR/$DATE/${DB}.sql.gz"
    echo "Backed up: $DB"
done
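
Per-database dumps also make selective restores straightforward. A minimal restore sketch (database name and dump path are hypothetical; restores need a privileged user, since the read-only backup user cannot create databases or insert data):

# Restore a single database from its compressed dump
DB="example_db"                                   # hypothetical database name
DUMP="/backup/mysql/2024-05-01/${DB}.sql.gz"      # hypothetical dump path
mysql -u root -p -e "CREATE DATABASE IF NOT EXISTS \`$DB\`"
gunzip < "$DUMP" | mysql -u root -p "$DB"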

PostgreSQL Backup

pg_dump Script

#!/bin/bash
# /usr/local/bin/backup-postgres.sh

BACKUP_DIR="/backup/postgres"
DATE=$(date +%Y-%m-%d_%H%M%S)
PGUSER="postgres"
export PGPASSWORD="secure_password"

mkdir -p "$BACKUP_DIR"

# All databases
pg_dumpall -U "$PGUSER" | gzip > "$BACKUP_DIR/all_databases_$DATE.sql.gz"

unset PGPASSWORD

With .pgpass (More Secure)

# ~/.pgpass
# Format: hostname:port:database:username:password
localhost:5432:*:postgres:secure_password

# Set restrictive permissions
chmod 600 ~/.pgpass
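
With ~/.pgpass in place for the user the cron job runs as, the PGPASSWORD export/unset from the previous script becomes unnecessary. A minimal sketch, assuming password authentication is configured for TCP connections (peer authentication on the local socket would not consult .pgpass):

#!/bin/bash
# Sketch: pg_dumpall reads the password from ~/.pgpass automatically
BACKUP_DIR="/backup/postgres"
DATE=$(date +%Y-%m-%d_%H%M%S)

mkdir -p "$BACKUP_DIR"
pg_dumpall -U postgres -h localhost | gzip > "$BACKUP_DIR/all_databases_$DATE.sql.gz"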

Individual Databases

#!/bin/bash
# /usr/local/bin/backup-postgres-separate.sh

BACKUP_DIR="/backup/postgres"
DATE=$(date +%Y-%m-%d)
PGUSER="postgres"

mkdir -p "$BACKUP_DIR/$DATE"

# List all databases (excluding templates and the postgres maintenance database)
DATABASES=$(psql -U "$PGUSER" -t -c "SELECT datname FROM pg_database WHERE datistemplate = false AND datname != 'postgres'")

for DB in $DATABASES; do
    pg_dump -U "$PGUSER" "$DB" | gzip > "$BACKUP_DIR/$DATE/${DB}.sql.gz"
    echo "Backed up: $DB"
done
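
Plain SQL dumps are easy to inspect, but pg_dump's custom format (-Fc) is worth considering as an alternative: it is compressed by default and pg_restore can restore individual tables from it. A sketch of the alternative loop body (restore names are hypothetical):

# Alternative: custom-format dump, selectively restorable with pg_restore
pg_dump -U "$PGUSER" -Fc "$DB" > "$BACKUP_DIR/$DATE/${DB}.dump"

# Example: restore a single table from such a dump
# pg_restore -U postgres -d example_db -t example_table "$BACKUP_DIR/$DATE/example_db.dump"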

Backup Rotation

Simple Rotation

#!/bin/bash
# /usr/local/bin/rotate-backups.sh

BACKUP_DIR="/backup"

# Daily backups: keep 7 days
find "$BACKUP_DIR/daily" -name "*.sql.gz" -mtime +7 -delete

# Weekly backups: keep 4 weeks
find "$BACKUP_DIR/weekly" -name "*.sql.gz" -mtime +28 -delete

# Monthly backups: keep 12 months
find "$BACKUP_DIR/monthly" -name "*.sql.gz" -mtime +365 -delete

Grandfather-Father-Son

#!/bin/bash
# /usr/local/bin/backup-gfs.sh

BACKUP_DIR="/backup/mysql"
DATE=$(date +%Y-%m-%d)
DAY_OF_WEEK=$(date +%u)
DAY_OF_MONTH=$(date +%d)
MYSQL_OPTS="--defaults-extra-file=/etc/mysql/backup.cnf"

# Directories
mkdir -p "$BACKUP_DIR"/{daily,weekly,monthly}

# Create the backup
BACKUP_FILE="backup_$DATE.sql.gz"
mysqldump $MYSQL_OPTS --all-databases --single-transaction | gzip > "/tmp/$BACKUP_FILE"

# Daily backup
cp "/tmp/$BACKUP_FILE" "$BACKUP_DIR/daily/"

# Weekly (Sunday)
if [ "$DAY_OF_WEEK" -eq 7 ]; then
    cp "/tmp/$BACKUP_FILE" "$BACKUP_DIR/weekly/"
fi

# Monthly (1st of the month)
if [ "$DAY_OF_MONTH" -eq "01" ]; then
    cp "/tmp/$BACKUP_FILE" "$BACKUP_DIR/monthly/"
fi

# Clean up
rm "/tmp/$BACKUP_FILE"

# Delete old backups
find "$BACKUP_DIR/daily" -name "*.sql.gz" -mtime +7 -delete
find "$BACKUP_DIR/weekly" -name "*.sql.gz" -mtime +28 -delete
find "$BACKUP_DIR/monthly" -name "*.sql.gz" -mtime +365 -delete

Cron Setup

Editing the Crontab

# As root
crontab -e

# Or edit a file under /etc/cron.d directly
vim /etc/cron.d/database-backup

Example Cron Jobs

# /etc/cron.d/database-backup

# Daily at 2:00 AM
0 2 * * * root /usr/local/bin/backup-mysql.sh >> /var/log/backup.log 2>&1

# Every 6 hours
0 */6 * * * root /usr/local/bin/backup-mysql.sh >> /var/log/backup.log 2>&1

# Weekly on Sunday at 3:00 AM
0 3 * * 0 root /usr/local/bin/backup-mysql.sh >> /var/log/backup.log 2>&1

# Rotation daily at 4:00 AM
0 4 * * * root /usr/local/bin/rotate-backups.sh >> /var/log/backup.log 2>&1
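
Cron starts jobs with a minimal environment, which is a common reason why a script works interactively but fails under cron. Setting PATH and MAILTO at the top of the cron.d file avoids this (values below are examples):

# /etc/cron.d/database-backup -- optional environment for all jobs in this file
MAILTO=admin@example.com
PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin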

Offsite Backup

To a Remote Server

#!/bin/bash
# /usr/local/bin/backup-offsite.sh

LOCAL_BACKUP="/backup/mysql/daily"
REMOTE_USER="backup"
REMOTE_HOST="backup-server.example.com"
REMOTE_DIR="/backup/$(hostname)"

# Synchronize with rsync
rsync -avz --delete \
    -e "ssh -i /root/.ssh/backup_key" \
    "$LOCAL_BACKUP/" \
    "$REMOTE_USER@$REMOTE_HOST:$REMOTE_DIR/"

To S3/MinIO

#!/bin/bash
# /usr/local/bin/backup-s3.sh

BACKUP_FILE="/tmp/backup_$(date +%Y-%m-%d).sql.gz"
S3_BUCKET="s3://my-backups/mysql/"

# Create the backup
mysqldump --defaults-extra-file=/etc/mysql/backup.cnf \
    --all-databases --single-transaction | gzip > "$BACKUP_FILE"

# Upload to S3
aws s3 cp "$BACKUP_FILE" "$S3_BUCKET"

# Delete the local file
rm "$BACKUP_FILE"

# Delete old backups in S3 (older than 30 days)
aws s3 ls "$S3_BUCKET" | while read -r line; do
    DATE=$(echo "$line" | awk '{print $1}')
    FILE=$(echo "$line" | awk '{print $4}')
    if [[ $(date -d "$DATE" +%s) -lt $(date -d '30 days ago' +%s) ]]; then
        aws s3 rm "${S3_BUCKET}${FILE}"
    fi
done
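
Parsing the output of aws s3 ls is fragile; where the bucket allows it, an S3 lifecycle rule is the more robust way to expire old objects server-side. A sketch using the bucket and prefix assumed above:

# Let S3 expire objects under mysql/ after 30 days instead of deleting them from a script
aws s3api put-bucket-lifecycle-configuration \
    --bucket my-backups \
    --lifecycle-configuration '{
        "Rules": [{
            "ID": "expire-mysql-backups",
            "Filter": {"Prefix": "mysql/"},
            "Status": "Enabled",
            "Expiration": {"Days": 30}
        }]
    }'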

With rclone

# Configure rclone interactively
rclone config

# Upload backups
rclone copy /backup/mysql remote:backups/mysql

# Sync (also deletes files on the remote that no longer exist locally)
rclone sync /backup/mysql remote:backups/mysql
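
rclone can also take care of retention on the remote side: --min-age restricts an operation to files older than the given age. A sketch (preview with --dry-run before letting it delete anything):

# Delete remote backups older than 30 days (preview first, then run for real)
rclone delete --dry-run --min-age 30d remote:backups/mysql
rclone delete --min-age 30d remote:backups/mysql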

Notifications

Email on Failure

#!/bin/bash
# /usr/local/bin/backup-with-notification.sh

LOG_FILE="/var/log/backup.log"
ADMIN_EMAIL="admin@example.com"
BACKUP_SCRIPT="/usr/local/bin/backup-mysql.sh"

# Run the backup
$BACKUP_SCRIPT > "$LOG_FILE" 2>&1
EXIT_CODE=$?

# Send an email on failure (-A attaches the log; GNU mailutils syntax, other mail variants may differ)
if [ $EXIT_CODE -ne 0 ]; then
    echo "Backup failed on $(hostname)" | mail -s "BACKUP FAILED" -A "$LOG_FILE" "$ADMIN_EMAIL"
fi

exit $EXIT_CODE

Slack Notification

#!/bin/bash
# Slack Webhook Notification

SLACK_WEBHOOK="https://hooks.slack.com/services/xxx/yyy/zzz"

send_slack() {
    local status="$1"
    local message="$2"

    curl -X POST -H 'Content-type: application/json' \
        --data "{\"text\":\"$status: $message\"}" \
        "$SLACK_WEBHOOK"
}

# Use inside the backup script (assumes EXIT_CODE was set by the backup step)
if [ $EXIT_CODE -eq 0 ]; then
    send_slack ":white_check_mark:" "Backup completed successfully on $(hostname)"
else
    send_slack ":x:" "Backup FAILED on $(hostname)"
fi

Healthchecks.io

#!/bin/bash
# Healthchecks.io Ping

HEALTHCHECK_URL="https://hc-ping.com/your-uuid-here"

# Run the backup
/usr/local/bin/backup-mysql.sh

# Ping on success, report to the /fail endpoint otherwise
if [ $? -eq 0 ]; then
    curl -fsS -m 10 --retry 5 "$HEALTHCHECK_URL"
else
    curl -fsS -m 10 --retry 5 "$HEALTHCHECK_URL/fail"
fi
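
Healthchecks.io can also track how long the backup takes if the job pings the /start endpoint before the backup begins (same UUID as above):

# Optional: signal job start so Healthchecks.io can measure the runtime
curl -fsS -m 10 --retry 5 "$HEALTHCHECK_URL/start"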

Backup Verification

Automated Check

#!/bin/bash
# /usr/local/bin/verify-backup.sh

BACKUP_FILE="$1"
TEST_DB="backup_test_$(date +%s)"

# Backup entpacken und in Test-DB importieren
gunzip < "$BACKUP_FILE" | mysql -u root -p"$MYSQL_PASS" -e "CREATE DATABASE $TEST_DB"
gunzip < "$BACKUP_FILE" | mysql -u root -p"$MYSQL_PASS" "$TEST_DB"

# Tabellen zählen
TABLE_COUNT=$(mysql -u root -p"$MYSQL_PASS" -N -e "SELECT COUNT(*) FROM information_schema.tables WHERE table_schema='$TEST_DB'")

if [ "$TABLE_COUNT" -gt 0 ]; then
    echo "Backup verified: $TABLE_COUNT tables found"
    RESULT=0
else
    echo "Backup verification FAILED"
    RESULT=1
fi

# Test-DB löschen
mysql -u root -p"$MYSQL_PASS" -e "DROP DATABASE $TEST_DB"

exit $RESULT

Checksums

#!/bin/bash
# Create checksums

BACKUP_FILE="/backup/mysql/backup.sql.gz"

# Create a SHA-256 checksum
sha256sum "$BACKUP_FILE" > "$BACKUP_FILE.sha256"

# Verify it later
sha256sum -c "$BACKUP_FILE.sha256"
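
A cheap additional check is gzip's built-in integrity test, which catches truncated or corrupted archives without importing anything:

# Test the gzip stream itself (exit code 0 means the archive is intact)
gzip -t "$BACKUP_FILE" && echo "archive OK"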

Complete Backup System

#!/bin/bash
# /usr/local/bin/backup-complete.sh

# Abort on any error, including failures inside pipelines
set -eo pipefail

# Configuration
BACKUP_DIR="/backup/mysql"
LOG_FILE="/var/log/backup.log"
ADMIN_EMAIL="admin@example.com"
SLACK_WEBHOOK="https://hooks.slack.com/..."
MYSQL_OPTS="--defaults-extra-file=/etc/mysql/backup.cnf"
DATE=$(date +%Y-%m-%d_%H%M%S)
HOSTNAME=$(hostname)

# Logging function
log() {
    echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1" | tee -a "$LOG_FILE"
}

# Slack notification
notify_slack() {
    curl -s -X POST -H 'Content-type: application/json' \
        --data "{\"text\":\"$1\"}" "$SLACK_WEBHOOK" > /dev/null
}

# Clean up leftover temporary dumps on exit
cleanup() {
    log "Cleanup..."
    rm -f /tmp/backup_*.sql.gz   # glob must stay unquoted so it expands
}
trap cleanup EXIT

log "Starting backup on $HOSTNAME"

# Create directories
mkdir -p "$BACKUP_DIR"/{daily,weekly,monthly}

# Create the backup
log "Creating database dump..."
BACKUP_FILE="$BACKUP_DIR/daily/backup_$DATE.sql.gz"
mysqldump $MYSQL_OPTS --all-databases --single-transaction --routines --triggers | gzip > "$BACKUP_FILE"

# Check the size
SIZE=$(stat -c%s "$BACKUP_FILE")
if [ "$SIZE" -lt 1000 ]; then
    log "ERROR: Backup file too small ($SIZE bytes)"
    notify_slack ":x: Backup FAILED on $HOSTNAME: File too small"
    exit 1
fi

log "Backup created: $(du -h "$BACKUP_FILE" | cut -f1)"

# Checksum
sha256sum "$BACKUP_FILE" > "$BACKUP_FILE.sha256"

# Copy to weekly/monthly
DAY_OF_WEEK=$(date +%u)
DAY_OF_MONTH=$(date +%d)

if [ "$DAY_OF_WEEK" -eq 7 ]; then
    cp "$BACKUP_FILE" "$BACKUP_DIR/weekly/"
    log "Weekly backup created"
fi

if [ "$DAY_OF_MONTH" -eq "01" ]; then
    cp "$BACKUP_FILE" "$BACKUP_DIR/monthly/"
    log "Monthly backup created"
fi

# Rotation
log "Rotating old backups..."
find "$BACKUP_DIR/daily" -name "*.sql.gz" -mtime +7 -delete
find "$BACKUP_DIR/weekly" -name "*.sql.gz" -mtime +28 -delete
find "$BACKUP_DIR/monthly" -name "*.sql.gz" -mtime +365 -delete

# Offsite sync
log "Syncing to offsite..."
rsync -avz --delete -e "ssh -i /root/.ssh/backup_key" \
    "$BACKUP_DIR/" backup@remote:/backup/$HOSTNAME/

log "Backup completed successfully"
notify_slack ":white_check_mark: Backup completed on $HOSTNAME ($(du -h "$BACKUP_FILE" | cut -f1))"

exit 0
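
A matching cron entry, optionally combined with a Healthchecks.io ping (the UUID is a placeholder), could look like this:

# /etc/cron.d/database-backup
0 2 * * * root /usr/local/bin/backup-complete.sh && curl -fsS -m 10 --retry 5 https://hc-ping.com/your-uuid-here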

Summary

| Database | Backup command |
|-----------|---------------|
| MySQL | mysqldump --all-databases |
| PostgreSQL | pg_dumpall |
| SQLite | sqlite3 db .backup |
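
SQLite is only mentioned in the table above; for completeness, a minimal sketch using its .backup command, which produces a consistent copy even while the database is in use (paths are examples):

# Online backup of an SQLite database file
DEST="/backup/sqlite/data_$(date +%Y-%m-%d).db"
sqlite3 /var/lib/app/data.db ".backup '$DEST'"
gzip -f "$DEST"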

| Rotation | Retention |
|----------|--------------|
| Daily | 7 days |
| Weekly | 4 weeks |
| Monthly | 12 months |

| Cron schedule | Meaning |
|-----------|-----------|
| 0 2 * * * | Daily at 02:00 |
| 0 */6 * * * | Every 6 hours |
| 0 3 * * 0 | Sunday at 03:00 |

Conclusion

Automatic database backups are easy to set up with cron. Combining local and offsite backups provides the best protection, and failure notifications let you react quickly. Regular backup verification ensures that the dumps can actually be restored, while a well-thought-out rotation scheme saves storage space and still allows long-term recovery.