backup.sh now covers all data: - SQLite via podman exec into server container (fallback to host path) - Postgres via pg_dumpall inside postgres container - Forgejo data volume via podman volume export - Caddy TLS certificates via podman volume export - .env file (plaintext secrets — store archive securely) restore.sh reverses each step: imports volumes, restores Postgres, restores SQLite, optionally restores .env (--force to overwrite). Both scripts find containers dynamically via compose service labels so they work regardless of the container name podman-compose assigns. .env.example documents HIY_BACKUP_DIR, HIY_BACKUP_REMOTE, HIY_BACKUP_RETAIN_DAYS. https://claude.ai/code/session_01FKCW3FDjNFj6jve4niMFXH
157 lines
7.4 KiB
Bash
Executable file
157 lines
7.4 KiB
Bash
Executable file
#!/usr/bin/env bash
#
# HIY daily backup script.
#
# Backed-up data:
#   1. SQLite database (hiy.db) — apps, deploys, env vars, users
#   2. Env files                — per-deploy decrypted env files
#   3. Git repos                — bare repos for git-push deploys
#   4. Postgres                 — pg_dumpall (hiy + forgejo databases)
#   5. Forgejo data volume      — repositories, avatars, LFS objects
#   6. Caddy TLS certificates   — caddy-data volume
#   7. .env file                — secrets (handle the archive with care)
#
# Destination options (mutually exclusive; set one):
#   HIY_BACKUP_DIR    — local path (e.g. /mnt/usb/hiy-backups, default /tmp/hiy-backups)
#   HIY_BACKUP_REMOTE — rclone remote:path (e.g. "b2:mybucket/hiy");
#                       requires rclone installed and configured
#
# Retention: 30 days local (remote retention managed by the storage provider).
#
# Suggested cron (run as the same user that owns the containers):
#   0 3 * * * /path/to/infra/backup.sh >> /var/log/hiy-backup.log 2>&1

set -euo pipefail
# ── Config ─────────────────────────────────────────────────────────────────────
HIY_DATA_DIR="${HIY_DATA_DIR:-/data}"              # where hiy.db / envs / repos live
BACKUP_DIR="${HIY_BACKUP_DIR:-/tmp/hiy-backups}"   # local archive destination
BACKUP_REMOTE="${HIY_BACKUP_REMOTE:-}"             # optional rclone remote:path
RETAIN_DAYS="${HIY_BACKUP_RETAIN_DAYS:-30}"        # local retention window (days)

# Path of the repo-root .env (one level up from infra/); it is copied into the
# archive in step 7. NOTE(review): the .env file is never *sourced* here —
# configuration comes from the caller's environment. If the backup cron is
# expected to pick up HIY_* settings from .env, add an explicit
# `set -a; . "${ENV_FILE}"; set +a` and confirm the intended precedence.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
ENV_FILE="${SCRIPT_DIR}/../.env"

TIMESTAMP=$(date +%Y%m%d-%H%M%S)
ARCHIVE_NAME="hiy-backup-${TIMESTAMP}.tar.gz"
STAGING="${BACKUP_DIR}/staging-${TIMESTAMP}"

# log MSG… — timestamped progress line on stdout (cron redirects to a logfile).
log() { echo "[hiy-backup] $(date '+%H:%M:%S') $*"; }

log "=== HIY Backup ==="
log "Data dir : ${HIY_DATA_DIR}"
log "Staging : ${STAGING}"

mkdir -p "${STAGING}"
# ── Helper: find a running container by compose service label ──────────────────
# Prints the name of the first running container whose compose service label
# matches $1 (empty output when none match). podman-compose chooses container
# names itself, so lookup is by label rather than by name.
find_container() {
  local svc="$1"
  podman ps \
    --filter "label=com.docker.compose.service=${svc}" \
    --format '{{.Names}}' \
    | head -n 1
}
# ── 1. SQLite ──────────────────────────────────────────────────────────────────
# Dump hiy.db as SQL text: prefer the running server container, fall back to
# the host path. A failed in-container exec previously aborted the whole run
# under `set -e` AND left a truncated hiy.sql behind via the redirection; now
# the partial file is removed and the host-path fallback is attempted.
log "--- SQLite ---"
SERVER_CTR=$(find_container server)
SQLITE_DUMPED=false
if [ -n "${SERVER_CTR}" ]; then
  log "Dumping hiy.db via container ${SERVER_CTR}…"
  if podman exec "${SERVER_CTR}" sqlite3 "${HIY_DATA_DIR}/hiy.db" .dump \
      > "${STAGING}/hiy.sql"; then
    SQLITE_DUMPED=true
  else
    rm -f "${STAGING}/hiy.sql"   # drop the truncated dump
    log "WARNING: in-container dump failed — trying host path…"
  fi
fi
if [ "${SQLITE_DUMPED}" = false ]; then
  if [ -f "${HIY_DATA_DIR}/hiy.db" ]; then
    log "Dumping hiy.db from host path…"
    sqlite3 "${HIY_DATA_DIR}/hiy.db" .dump > "${STAGING}/hiy.sql"
  else
    log "WARNING: hiy.db not found — skipping SQLite dump"
  fi
fi
# ── 2. Env files ───────────────────────────────────────────────────────────────
# Per-deploy decrypted env files, streamed out of the container as a tarball.
log "--- Env files ---"
if [ -n "${SERVER_CTR}" ]; then
  # The redirection creates envs.tar.gz even when the exec fails or envs/ is
  # missing — a 0-byte/truncated tarball in the archive would break restore,
  # so remove the output file on any failure instead of blanket `|| true`.
  if ! podman exec "${SERVER_CTR}" sh -c \
      "[ -d '${HIY_DATA_DIR}/envs' ] && tar -C '${HIY_DATA_DIR}' -czf - envs" \
      > "${STAGING}/envs.tar.gz" 2>/dev/null; then
    rm -f "${STAGING}/envs.tar.gz"
    log "envs/ not found in container (or dump failed) — skipping"
  fi
elif [ -d "${HIY_DATA_DIR}/envs" ]; then
  tar -czf "${STAGING}/envs.tar.gz" -C "${HIY_DATA_DIR}" envs
fi
# ── 3. Git repos ───────────────────────────────────────────────────────────────
# Bare repos for git-push deploys, streamed out of the container as a tarball.
log "--- Git repos ---"
if [ -n "${SERVER_CTR}" ]; then
  # As with envs: the redirection creates repos.tar.gz even on failure, so
  # remove the output file rather than shipping a 0-byte/truncated tarball.
  if ! podman exec "${SERVER_CTR}" sh -c \
      "[ -d '${HIY_DATA_DIR}/repos' ] && tar -C '${HIY_DATA_DIR}' -czf - repos" \
      > "${STAGING}/repos.tar.gz" 2>/dev/null; then
    rm -f "${STAGING}/repos.tar.gz"
    log "repos/ not found in container (or dump failed) — skipping"
  fi
elif [ -d "${HIY_DATA_DIR}/repos" ]; then
  tar -czf "${STAGING}/repos.tar.gz" -C "${HIY_DATA_DIR}" repos
fi
# ── 4. Postgres ────────────────────────────────────────────────────────────────
# Full-cluster SQL dump (hiy + forgejo databases) via pg_dumpall inside the
# postgres container.
log "--- Postgres ---"
PG_CTR=$(find_container postgres)
if [ -n "${PG_CTR}" ]; then
  # pg_dumpall typically requires a superuser role; the default matches the
  # compose provisioning, override with HIY_PG_USER if the cluster differs.
  PG_DUMP_USER="${HIY_PG_USER:-hiy_admin}"
  log "Running pg_dumpall via container ${PG_CTR}…"
  podman exec "${PG_CTR}" pg_dumpall -U "${PG_DUMP_USER}" \
    > "${STAGING}/postgres.sql"
else
  log "WARNING: postgres container not running — skipping Postgres dump"
fi
# ── 5. Forgejo data volume ─────────────────────────────────────────────────────
# Repositories, avatars and LFS objects live in the forgejo-data volume;
# exported as an uncompressed tar (the final archive step compresses it).
log "--- Forgejo volume ---"
if ! podman volume exists forgejo-data 2>/dev/null; then
  log "forgejo-data volume not found — skipping"
else
  log "Exporting forgejo-data volume…"
  podman volume export forgejo-data > "${STAGING}/forgejo-data.tar"
fi
# ── 6. Caddy TLS certificates ──────────────────────────────────────────────────
# Backing up caddy-data avoids re-issuing certificates (and hitting Let's
# Encrypt-style rate limits) after a restore — presumably; confirm ACME setup.
log "--- Caddy volume ---"
if ! podman volume exists caddy-data 2>/dev/null; then
  log "caddy-data volume not found — skipping"
else
  log "Exporting caddy-data volume…"
  podman volume export caddy-data > "${STAGING}/caddy-data.tar"
fi
# ── 7. .env file ───────────────────────────────────────────────────────────────
# Copied verbatim — it contains plaintext secrets, hence the warning below.
log "--- .env ---"
if [ ! -f "${ENV_FILE}" ]; then
  log ".env not found at ${ENV_FILE} — skipping"
else
  cp "${ENV_FILE}" "${STAGING}/dot-env"
  log "WARNING: archive contains plaintext secrets — store it securely"
fi
# ── Create archive ─────────────────────────────────────────────────────────────
# Bundle everything staged into a single timestamped tar.gz, then discard
# the staging directory.
mkdir -p "${BACKUP_DIR}"
ARCHIVE_PATH="${BACKUP_DIR}/${ARCHIVE_NAME}"
log "Creating archive: ${ARCHIVE_PATH}"
tar -czf "${ARCHIVE_PATH}" -C "${STAGING}" .
# ':?' aborts if STAGING is ever empty/unset — guards against 'rm -rf /'-class
# accidents; '--' stops option parsing on unusual paths.
rm -rf -- "${STAGING:?}"

ARCHIVE_SIZE=$(du -sh "${ARCHIVE_PATH}" | cut -f1)
log "Archive size: ${ARCHIVE_SIZE}"
# ── Upload to remote (optional) ────────────────────────────────────────────────
# Only attempted when HIY_BACKUP_REMOTE is set; rclone must already be
# configured for the target remote.
if [ -z "${BACKUP_REMOTE}" ]; then
  : # no remote configured — local-only backup
elif ! command -v rclone >/dev/null 2>&1; then
  log "WARNING: HIY_BACKUP_REMOTE is set but rclone is not installed — skipping"
  log "Install: https://rclone.org/install/"
else
  log "Uploading to ${BACKUP_REMOTE}…"
  rclone copy "${ARCHIVE_PATH}" "${BACKUP_REMOTE}/"
  log "Upload complete."
fi
# ── Rotate old local backups ───────────────────────────────────────────────────
# Prune archives older than the retention window, then report what's left.
log "Removing local backups older than ${RETAIN_DAYS} days…"
find "${BACKUP_DIR}" -maxdepth 1 -name 'hiy-backup-*.tar.gz' -mtime "+${RETAIN_DAYS}" -delete

kept=$(find "${BACKUP_DIR}" -maxdepth 1 -name 'hiy-backup-*.tar.gz' | wc -l)
log "Local backups retained: ${kept}"

log "=== Backup complete: ${ARCHIVE_NAME} ==="