init backup scripts

This commit is contained in:
Nico Haider
2026-04-27 17:45:35 +02:00
commit 0f290fa743
7 changed files with 407 additions and 0 deletions
+2
View File
@@ -0,0 +1,2 @@
logs
tmp
+45
View File
@@ -0,0 +1,45 @@
#!/bin/bash
set -euo pipefail
source /home/nico/backup/lib.sh

# --------------------------------
# Configuration
# --------------------------------
AFFINE_VOLUME_PATH="/home/nico/.affine"
BACKUP_DIR="/home/nico/backup/tmp/affine_backups"
REMOTE="onedrive-tdhaider:/IONOS/Backups"
DATE=$(date +%F_%H-%M-%S)
ARCHIVE="$BACKUP_DIR/affine-files-backup-$DATE.tar.gz"

# --------------------------------
# Local staging area for the archive before upload
# --------------------------------
mkdir -p "$BACKUP_DIR"

# --------------------------------
# Archive Affine storage + config
# --------------------------------
log "📦 Backup starte für Affine Files..."
tar czf "$ARCHIVE" \
  -C "$AFFINE_VOLUME_PATH" storage config \
  || fail "Fehler beim Affine File Backup!"
log "✅ Affine Dateien erfolgreich gesichert"

# --------------------------------
# Upload to OneDrive, then prune old remote copies (keep 14)
# --------------------------------
log "☁️ Upload zu OneDrive..."
upload "$ARCHIVE" "$REMOTE/affine-files"
log "✅ Upload abgeschlossen!"

cleanup_keep_latest "$REMOTE/affine-files" 14
log "✅ Cleanup abgeschlossen!"

# --------------------------------
# Remove the local staging directory
# --------------------------------
rm -rf "$BACKUP_DIR"
log "🧹 Lokale Backups gelöscht"

log "🏁 Affine Backup fertig!"
+46
View File
@@ -0,0 +1,46 @@
#!/bin/bash
set -euo pipefail
source /home/nico/backup/lib.sh

# ================================
# Settings
# ================================
GITEA_CONTAINER="gitea"                           # name of the Gitea docker container
BACKUP_DIR="/home/nico/backup/tmp/gitea_backups"  # temporary backup path on the server
REMOTE="onedrive-tdhaider:/IONOS/Backups"         # base destination on OneDrive
DATE=$(date +%F_%H-%M-%S)

# ================================
# Prepare the backup directory
# ================================
mkdir -p "$BACKUP_DIR"

# ================================
# Archive the Gitea data volume (git repos + gitea app data)
# ================================
log "📦 Backup starte für Gitea Dateien..."
# The archive path is quoted so the $DATE expansion can never be
# word-split or glob-expanded before reaching tar.
docker run --rm -v gitea_gitea-data:/data -v "$BACKUP_DIR":/backup alpine \
  tar czf "/backup/gitea-files-backup-$DATE.tar.gz" -C /data git gitea \
  || fail "Fehler beim Backup der Gitea Dateien!"
log "✅ Gitea Dateien erfolgreich gesichert"

# ================================
# Upload to OneDrive, then keep only the 14 newest remote copies
# ================================
log "☁️ Lade Gitea Backup zu OneDrive hoch ..."
upload "$BACKUP_DIR/gitea-files-backup-$DATE.tar.gz" "$REMOTE/gitea-files"
log "✅ Upload abgeschlossen!"

cleanup_keep_latest "$REMOTE/gitea-files" 14
log "✅ Cleanup abgeschlossen!"

# ================================
# Clean up local temporary backups
# ================================
[[ -d "$BACKUP_DIR" ]] && rm -rf "$BACKUP_DIR"
log "🧹 Lokale temporäre Backups gelöscht."
log "🏁 Alle Backups abgeschlossen!"
+69
View File
@@ -0,0 +1,69 @@
#!/bin/bash
set -euo pipefail
source /home/nico/backup/lib.sh

# ================================
# Settings
# ================================
MAILCOW_DIR="$HOME/docker-setup/mailcow"
TMP_BASE="/home/nico/backup/tmp/mailcow"
TMP_BACKUP_DIR="$TMP_BASE/backups"
REMOTE="onedrive-tdhaider:/IONOS/Backups/mailcow"
DATE=$(date +%F_%H-%M-%S)

# ================================
# Preparation
# ================================
log "📂 Bereite temporäres Verzeichnis vor..."
rm -rf "$TMP_BASE"
mkdir -p "$TMP_BACKUP_DIR"

cd "$MAILCOW_DIR" || fail "mailcow Verzeichnis nicht gefunden!"

# ================================
# Mailcow full backup via the official helper script
# ================================
log "📦 Starte mailcow FULL Backup (backup all)..."
MAILCOW_BACKUP_LOCATION="$TMP_BACKUP_DIR" \
  ./helper-scripts/backup_and_restore.sh backup all

# ================================
# Locate the newest backup directory.
# The trailing "|| true" keeps set -e / pipefail from aborting the
# script inside the command substitution when no mailcow-* directory
# exists, so the explicit error check below can run and log properly.
# ================================
LATEST_BACKUP=$(ls -dt "$TMP_BACKUP_DIR"/mailcow-* 2>/dev/null | head -n 1 || true)

if [ -z "$LATEST_BACKUP" ] || [ ! -d "$LATEST_BACKUP" ]; then
  log "❌ Kein mailcow Backup gefunden!"
  exit 1
fi

BACKUP_NAME=$(basename "$LATEST_BACKUP")
ARCHIVE="/tmp/${BACKUP_NAME}_${DATE}.tar.zst"

# ================================
# Compress
# ================================
log "🗜️ Komprimiere Backup → $ARCHIVE"
tar --use-compress-program=zstd -cf "$ARCHIVE" \
  -C "$TMP_BACKUP_DIR" "$BACKUP_NAME"

# ================================
# Upload, then keep only the 2 newest remote copies
# ================================
log "☁️ Lade Backup zu OneDrive hoch..."
upload "$ARCHIVE" "$REMOTE"
log "✅ Upload abgeschlossen!"

cleanup_keep_latest "$REMOTE" 2
log "✅ Cleanup abgeschlossen!"

# ================================
# Clean up. ARCHIVE is a regular file, so it is tested with -f
# (the previous -d test never matched and the archive was left
# behind in /tmp on every run).
# ================================
[[ -d "$TMP_BASE" ]] && rm -rf "$TMP_BASE"
[[ -f "$ARCHIVE" ]] && rm -f "$ARCHIVE"
log "🧹 Temporäre Dateien gelöscht."
log "🏁 mailcow FULL Backup abgeschlossen!"
+52
View File
@@ -0,0 +1,52 @@
#!/bin/bash
set -euo pipefail
source /home/nico/backup/lib.sh

# ================================
# Settings
# ================================
CONTAINER="postgresql"                                 # name of the PostgreSQL docker container
DB_USER="nico"                                         # PostgreSQL user
BACKUP_DIR="/home/nico/backup/tmp/postgresql_backups"  # temporary backup path on the server
REMOTE="onedrive-tdhaider:/IONOS/Backups"              # base destination on OneDrive
DATE=$(date +%F_%H-%M-%S)

# ================================
# Prepare the backup directory
# ================================
mkdir -p "$BACKUP_DIR"

# ================================
# Dump one database, upload it, prune old remote copies.
# $1 - database name
# ================================
backup_db() {
  local DB_NAME=$1
  local FILE="$BACKUP_DIR/${DB_NAME}_${DATE}.sql.gz"

  log "📦 Backup starte für Datenbank: $DB_NAME ..."
  # No -t here: allocating a pseudo-TTY would translate line endings
  # in the piped dump stream and corrupt the gzip'd output.
  docker exec "$CONTAINER" pg_dump -U "$DB_USER" "$DB_NAME" | gzip > "$FILE" \
    || fail "Fehler beim Backup von $DB_NAME"

  # Upload into a per-database sub-folder
  log "☁️ Lade $DB_NAME Backup zu OneDrive hoch ..."
  upload "$FILE" "$REMOTE/postgresql_$DB_NAME"
  log "✅ Upload abgeschlossen!"

  cleanup_keep_latest "$REMOTE/postgresql_$DB_NAME" 30
  log "✅ Cleanup abgeschlossen!"
}

# ================================
# Back up and upload all databases
# ================================
backup_db "gitea"
backup_db "projektverwaltung"

# ================================
# Clean up local temporary backups
# ================================
[[ -d "$BACKUP_DIR" ]] && rm -rf "$BACKUP_DIR"
log "🧹 Lokale temporäre Backups gelöscht."
log "🏁 Alle Backups abgeschlossen!"
+136
View File
@@ -0,0 +1,136 @@
#!/bin/bash
set -euo pipefail
source /home/nico/backup/lib.sh

# ================================
# Basic settings
# ================================
WORDPRESS_BASE="/home/nico/docker-setup/wordpress"
BACKUP_DIR="/home/nico/backup/tmp/wordpress_backups"
REMOTE="onedrive-tdhaider:/IONOS/Backups/wordpress"
MARIADB_CONTAINER="mariadb"
DATE=$(date +%F_%H-%M-%S)
RUN_DIR="$BACKUP_DIR/$DATE"

mkdir -p "$RUN_DIR"

log "🚀 Starte WordPress Komplett-Backup"
log "📅 Backup-Zeitpunkt: $DATE"
log "========================================="

FAILED_SITES=()

# ================================
# Iterate over every WordPress site directory
# ================================
for SITE_PATH in "$WORDPRESS_BASE"/*; do
  [ -d "$SITE_PATH" ] || continue

  SITE_NAME=$(basename "$SITE_PATH")
  ENV_FILE="$SITE_PATH/.env"
  VOLUME_NAME="wordpress_${SITE_NAME}"
  SITE_DIR="$RUN_DIR/$SITE_NAME"

  log
  log "🔹 Verarbeite: $SITE_NAME"

  if [ ! -f "$ENV_FILE" ]; then
    log "⚠️ Keine .env gefunden überspringe"
    FAILED_SITES+=("$SITE_NAME (keine .env)")
    continue
  fi

  mkdir -p "$SITE_DIR"

  # ================================
  # Load the per-site .env
  # ================================
  # Clear the credentials from the previous loop iteration first;
  # otherwise a site with an incomplete .env would pass the check
  # below and silently be dumped with the PREVIOUS site's database
  # credentials.
  unset MYSQL_DATABASE MYSQL_USER MYSQL_PASSWORD
  set -o allexport
  source "$ENV_FILE"
  set +o allexport

  if [[ -z "${MYSQL_DATABASE:-}" || -z "${MYSQL_USER:-}" || -z "${MYSQL_PASSWORD:-}" ]]; then
    log "❌ Unvollständige DB-Zugangsdaten"
    FAILED_SITES+=("$SITE_NAME (DB-Config)")
    continue
  fi

  # ================================
  # Dump the site's database
  # ================================
  DB_FILE="$SITE_DIR/db_${SITE_NAME}_${DATE}.sql.gz"
  log "🗄️ DB Backup: $MYSQL_DATABASE"
  # Password travels via MYSQL_PWD in the container env, not argv.
  if docker exec -e MYSQL_PWD="$MYSQL_PASSWORD" "$MARIADB_CONTAINER" \
    mariadb-dump -h 127.0.0.1 -u "$MYSQL_USER" "$MYSQL_DATABASE" \
    | gzip > "$DB_FILE"; then
    log "✅ DB gesichert"
  else
    log "❌ DB Backup fehlgeschlagen"
    FAILED_SITES+=("$SITE_NAME (DB)")
    continue
  fi

  # ================================
  # WordPress files (docker volume), low-priority to spare the host
  # ================================
  FILES_FILE="$SITE_DIR/files_${SITE_NAME}_${DATE}.tar.gz"
  log "📦 Dateien Backup (low-load, Volume: $VOLUME_NAME)"
  if docker run --rm \
    -v "$VOLUME_NAME":/data \
    -v "$SITE_DIR":/backup \
    alpine sh -c "
      nice -n 19 ionice -c2 -n7 \
      tar \
        --exclude='./wp-content/cache' \
        --exclude='./wp-content/wflogs' \
        --exclude='./wp-content/uploads/cache' \
        --exclude='./wp-content/updraft' \
        -cf - -C /data . \
      | gzip -1 > /backup/$(basename "$FILES_FILE")
    "; then
    log "✅ Dateien gesichert"
  else
    log "❌ Dateien Backup fehlgeschlagen"
    FAILED_SITES+=("$SITE_NAME (Files)")
    continue
  fi
done

# ================================
# One combined upload for the whole run, keep 2 newest runs remotely
# ================================
log
log "☁️ Starte EINEN Gesamt-Upload zu OneDrive..."
upload "$RUN_DIR" "$REMOTE/$DATE"
log "✅ Upload abgeschlossen"

cleanup_keep_latest "$REMOTE" 2
log "✅ Cleanup abgeschlossen!"

# ================================
# Clean up local temporary backups
# ================================
[[ -d "$BACKUP_DIR" ]] && rm -rf "$BACKUP_DIR"
log
log "🧹 Lokale temporäre Backups gelöscht"
log "🏁 WordPress Backup abgeschlossen"

# ================================
# Result summary
# ================================
if [ "${#FAILED_SITES[@]}" -gt 0 ]; then
  log
  log "⚠️ Folgende Seiten hatten Probleme:"
  for SITE in "${FAILED_SITES[@]}"; do
    log " - $SITE"
  done
else
  log
  log "🎉 Alle WordPress-Seiten erfolgreich gesichert!"
fi
Executable
+57
View File
@@ -0,0 +1,57 @@
#!/bin/bash
# Write a timestamped log line to stdout: "[YYYY-MM-DD HH:MM:SS] message".
log() {
  local stamp
  stamp=$(date '+%F %T')
  echo "[$stamp] $*"
}
# Log a fatal error message via log() and terminate the whole script
# with a non-zero status. Used as the "|| fail ..." handler everywhere.
fail() {
  log "$@"
  exit 1
}
# Upload a file or directory to an rclone remote.
# $1 - local source path
# $2 - rclone destination (remote:path)
# Aborts the whole script via fail() if rclone reports an error.
upload() {
  local SRC=$1
  local DEST=$2
  # Fixed: the message used to concatenate "$SRC$DEST" with no separator.
  log "☁️ Upload: $SRC → $DEST"
  rclone copy "$SRC" "$DEST" \
    --transfers=2 \
    --checkers=4 \
    --retries=5 \
    --low-level-retries=10 \
    --progress \
    || fail "Upload fehlgeschlagen: $SRC"
}
# -----------------------------
# Alte Backups behalten, nur X bleiben
# -----------------------------
# Keep only the newest KEEP_COUNT backup entries on the remote.
# $1 - rclone remote path containing one sub-directory per backup
# $2 - number of most recent backups to keep
# Backup names embed a sortable date, so a lexical sort puts the
# oldest entries first. Deletion errors are logged but non-fatal.
cleanup_keep_latest() {
  local REMOTE_PATH=$1
  local KEEP_COUNT=$2

  log "🧹 Prüfe alte Backups auf $REMOTE_PATH, behalte nur die letzten $KEEP_COUNT..."

  # Read the listing into an array: entries with spaces are kept intact,
  # and an empty remote yields 0 entries (the old `echo | wc -l` counted
  # an empty listing as 1). Splitting declaration from assignment also
  # avoids `local VAR=$(...)` masking the pipeline's exit status.
  local BACKUPS=()
  mapfile -t BACKUPS < <(rclone lsf "$REMOTE_PATH" --dirs-only --max-depth 1 | sort)

  local TOTAL=${#BACKUPS[@]}
  if (( TOTAL <= KEEP_COUNT )); then
    log "✅ Nur $TOTAL Backups vorhanden, nichts zu löschen"
    return
  fi

  # Delete the oldest entries until only KEEP_COUNT remain.
  local TO_DELETE_COUNT=$((TOTAL - KEEP_COUNT))
  local b
  for b in "${BACKUPS[@]:0:TO_DELETE_COUNT}"; do
    log "🗑️ Lösche altes Backup: $REMOTE_PATH/$b"
    # purge removes the directory and all of its contents in one call.
    rclone purge "$REMOTE_PATH/$b" || log "⚠️ Fehler beim Löschen von $b"
  done

  log "✅ Alte Backups bereinigt"
}