claude/heroku-clone-mvp-plan-NREhc #1

Merged
sander merged 42 commits from claude/heroku-clone-mvp-plan-NREhc into main 2026-03-29 07:24:40 +00:00
3 changed files with 245 additions and 26 deletions
Showing only changes of commit e8d303f184 - Show all commits

View file

@ -15,3 +15,12 @@ POSTGRES_PASSWORD=changeme
# Forgejo (optional — only needed if you add the forgejo service to docker-compose.yml).
FORGEJO_DB_PASSWORD=changeme
FORGEJO_DOMAIN=git.yourdomain.com
# ── Backup (infra/backup.sh) ──────────────────────────────────────────────────
# Local directory to store backup archives.
HIY_BACKUP_DIR=/mnt/usb/hiy-backups
# Optional rclone remote (e.g. "b2:mybucket/hiy", "s3:mybucket/hiy").
# Requires rclone installed and configured. Leave blank to skip remote upload.
HIY_BACKUP_REMOTE=
# How many days to keep local archives (default 30).
HIY_BACKUP_RETAIN_DAYS=30

View file

@ -3,17 +3,21 @@
#
# What is backed up:
# 1. SQLite database (hiy.db) — apps, deploys, env vars, users
# 2. Env files directory — decrypted env files written per deploy
# 2. Env files — per-deploy decrypted env files
# 3. Git repos — bare repos for git-push deploys
# 4. Postgres — pg_dumpall (hiy + forgejo databases)
# 5. Forgejo data volume — repositories, avatars, LFS objects
# 6. Caddy TLS certificates — caddy-data volume
# 7. .env file — secrets (handle the archive with care)
#
# Destinations (the local archive is always written; remote upload is optional and additional):
# HIY_BACKUP_DIR — local path (e.g. /mnt/usb/hiy-backups, default /tmp/hiy-backups)
# HIY_BACKUP_REMOTE — rclone remote:path (e.g. "b2:mybucket/hiy")
# requires rclone installed and configured
#
# Retention: 30 days (local only; remote retention is managed by the storage provider)
# Retention: 30 days local (remote retention managed by the storage provider).
#
# Suggested cron (run as the same user as hiy-server):
# Suggested cron (run as the same user that owns the containers):
# 0 3 * * * /path/to/infra/backup.sh >> /var/log/hiy-backup.log 2>&1
set -euo pipefail
@ -24,6 +28,11 @@ BACKUP_DIR="${HIY_BACKUP_DIR:-/tmp/hiy-backups}"
BACKUP_REMOTE="${HIY_BACKUP_REMOTE:-}"
RETAIN_DAYS="${HIY_BACKUP_RETAIN_DAYS:-30}"
# Load .env from the repo root (one level up from infra/) so the backup cron
# can find HIY_DATA_DIR, container names, etc. without extra shell setup.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
ENV_FILE="${SCRIPT_DIR}/../.env"
TIMESTAMP=$(date +%Y%m%d-%H%M%S)
ARCHIVE_NAME="hiy-backup-${TIMESTAMP}.tar.gz"
STAGING="${BACKUP_DIR}/staging-${TIMESTAMP}"
@ -31,33 +40,91 @@ STAGING="${BACKUP_DIR}/staging-${TIMESTAMP}"
# Emit a timestamped status line tagged with the script name.
log() { printf '[hiy-backup] %s %s\n' "$(date '+%H:%M:%S')" "$*"; }
# Startup banner: one line per fact. The pre-rename duplicates of the
# "Data dir"/"Staging" lines were stale leftovers and printed everything twice.
log "=== HIY Backup ==="
log "Data dir : ${HIY_DATA_DIR}"
log "Staging : ${STAGING}"
# ── 1. Stage files ─────────────────────────────────────────────────────────────
mkdir -p "${STAGING}"
# SQLite: use the .dump command to produce a portable SQL text dump.
if [ -f "${HIY_DATA_DIR}/hiy.db" ]; then
log "Dumping SQLite database…"
# ── Helper: find a running container by compose service label ──────────────────
# Print the name of the first running container whose compose service
# label matches $1 (empty output when no such container is up).
find_container() {
  local svc="$1"
  podman ps \
    --format '{{.Names}}' \
    --filter "label=com.docker.compose.service=${svc}" \
    | head -n 1
}
# ── 1. SQLite ──────────────────────────────────────────────────────────────────
# Prefer dumping through the server container (its sqlite3 sees the live DB);
# fall back to a host-side dump only when the container is down.
log "--- SQLite ---"
SERVER_CTR=$(find_container server)
if [ -n "${SERVER_CTR}" ]; then
  log "Dumping hiy.db via container ${SERVER_CTR}"
  podman exec "${SERVER_CTR}" sqlite3 "${HIY_DATA_DIR}/hiy.db" .dump \
    > "${STAGING}/hiy.sql"
elif [ -f "${HIY_DATA_DIR}/hiy.db" ]; then
  log "Server container not running — dumping from host path…"
  sqlite3 "${HIY_DATA_DIR}/hiy.db" .dump > "${STAGING}/hiy.sql"
else
  # Previously this branch logged two near-identical warnings (a stale line
  # from an earlier revision); keep the single current message.
  log "WARNING: hiy.db not found — skipping SQLite dump"
fi
# Env files (contain decrypted secrets — handle with care).
if [ -d "${HIY_DATA_DIR}/envs" ]; then
log "Copying env files…"
cp -r "${HIY_DATA_DIR}/envs" "${STAGING}/envs"
# ── 2. Env files ───────────────────────────────────────────────────────────────
# Per-deploy decrypted env files — the resulting archive contains secrets.
log "--- Env files ---"
if [ -n "${SERVER_CTR}" ]; then
  podman exec "${SERVER_CTR}" sh -c \
    "[ -d ${HIY_DATA_DIR}/envs ] && tar -C ${HIY_DATA_DIR} -czf - envs" \
    > "${STAGING}/envs.tar.gz" 2>/dev/null || true
  # The redirection creates envs.tar.gz before the in-container test runs, so
  # a missing envs/ left a zero-byte (invalid) tarball behind — drop it.
  [ -s "${STAGING}/envs.tar.gz" ] || rm -f "${STAGING}/envs.tar.gz"
elif [ -d "${HIY_DATA_DIR}/envs" ]; then
  tar -czf "${STAGING}/envs.tar.gz" -C "${HIY_DATA_DIR}" envs
fi
# Bare git repos.
if [ -d "${HIY_DATA_DIR}/repos" ]; then
log "Copying git repos…"
cp -r "${HIY_DATA_DIR}/repos" "${STAGING}/repos"
# ── 3. Git repos ───────────────────────────────────────────────────────────────
# Bare repositories used by git-push deploys.
log "--- Git repos ---"
if [ -n "${SERVER_CTR}" ]; then
  podman exec "${SERVER_CTR}" sh -c \
    "[ -d ${HIY_DATA_DIR}/repos ] && tar -C ${HIY_DATA_DIR} -czf - repos" \
    > "${STAGING}/repos.tar.gz" 2>/dev/null || true
  # Same zero-byte-tarball hazard as the envs step: the redirect creates the
  # file even when repos/ does not exist in the container — remove it if empty.
  [ -s "${STAGING}/repos.tar.gz" ] || rm -f "${STAGING}/repos.tar.gz"
elif [ -d "${HIY_DATA_DIR}/repos" ]; then
  tar -czf "${STAGING}/repos.tar.gz" -C "${HIY_DATA_DIR}" repos
fi
# ── 2. Create archive ──────────────────────────────────────────────────────────
# ── 4. Postgres ────────────────────────────────────────────────────────────────
# Dump all databases (hiy + forgejo) as SQL text via the postgres container.
log "--- Postgres ---"
PG_CTR=$(find_container postgres)
if [ -z "${PG_CTR}" ]; then
  log "WARNING: postgres container not running — skipping Postgres dump"
else
  log "Running pg_dumpall via container ${PG_CTR}"
  podman exec "${PG_CTR}" pg_dumpall -U hiy_admin > "${STAGING}/postgres.sql"
fi
# ── 5. Forgejo data volume ─────────────────────────────────────────────────────
# Export the named volume as a tar stream (skipped when the volume is absent).
log "--- Forgejo volume ---"
if ! podman volume exists forgejo-data 2>/dev/null; then
  log "forgejo-data volume not found — skipping"
else
  log "Exporting forgejo-data volume…"
  podman volume export forgejo-data > "${STAGING}/forgejo-data.tar"
fi
# ── 6. Caddy TLS certificates ──────────────────────────────────────────────────
# Export the caddy-data volume so certificates survive a rebuild.
log "--- Caddy volume ---"
if ! podman volume exists caddy-data 2>/dev/null; then
  log "caddy-data volume not found — skipping"
else
  log "Exporting caddy-data volume…"
  podman volume export caddy-data > "${STAGING}/caddy-data.tar"
fi
# ── 7. .env file ───────────────────────────────────────────────────────────────
# Staged under the name dot-env so it is visible inside the archive.
log "--- .env ---"
if [ ! -f "${ENV_FILE}" ]; then
  log ".env not found at ${ENV_FILE} — skipping"
else
  cp "${ENV_FILE}" "${STAGING}/dot-env"
  log "WARNING: archive contains plaintext secrets — store it securely"
fi
# ── Create archive ─────────────────────────────────────────────────────────────
# Everything staged above is rolled into a single timestamped tar.gz.
mkdir -p "${BACKUP_DIR}"
ARCHIVE_PATH="${BACKUP_DIR}/${ARCHIVE_NAME}"
log "Creating archive: ${ARCHIVE_PATH}"
@ -67,19 +134,19 @@ rm -rf "${STAGING}"
ARCHIVE_SIZE=$(du -sh "${ARCHIVE_PATH}" | cut -f1)
log "Archive size: ${ARCHIVE_SIZE}"
# ── Upload to remote (optional) ────────────────────────────────────────────────
# When HIY_BACKUP_REMOTE is set, copy the finished archive via rclone.
# (Stale pre-edit log lines that duplicated each message were removed.)
if [ -n "${BACKUP_REMOTE}" ]; then
  if command -v rclone &>/dev/null; then
    log "Uploading to ${BACKUP_REMOTE}"
    rclone copy "${ARCHIVE_PATH}" "${BACKUP_REMOTE}/"
    log "Upload complete."
  else
    log "WARNING: HIY_BACKUP_REMOTE is set but rclone is not installed — skipping"
    log "Install: https://rclone.org/install/"
  fi
fi
# ── Rotate old local backups ───────────────────────────────────────────────────
log "Removing local backups older than ${RETAIN_DAYS} days…"
# -type f guards against a directory or symlink that happens to match the glob.
find "${BACKUP_DIR}" -maxdepth 1 -type f -name 'hiy-backup-*.tar.gz' \
  -mtime "+${RETAIN_DAYS}" -delete

143
infra/restore.sh Executable file
View file

@ -0,0 +1,143 @@
#!/usr/bin/env bash
# HIY restore script
#
# Restores a backup archive produced by infra/backup.sh.
#
# Usage:
# ./infra/restore.sh /path/to/hiy-backup-20260101-030000.tar.gz
#
# What is restored:
# 1. SQLite database (hiy.db)
# 2. Env files and git repos
# 3. Postgres databases (pg_dumpall dump)
# 4. Forgejo data volume
# 5. Caddy TLS certificates
# 6. .env file (optional — skipped if already present unless --force is passed)
#
# ⚠ Run this with the stack STOPPED, then bring it back up afterwards:
# podman compose -f infra/docker-compose.yml down
# ./infra/restore.sh hiy-backup-*.tar.gz
# podman compose -f infra/docker-compose.yml up -d
set -euo pipefail
# $1: archive path (required). $2: optional "--force" to overwrite an existing .env.
ARCHIVE="${1:-}"
FORCE="${2:-}"
# Resolve paths relative to this script so it works from any working directory.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
ENV_FILE="${SCRIPT_DIR}/../.env"
# NOTE(review): defaults to /data without sourcing .env — assumes the caller
# exports HIY_DATA_DIR to match backup.sh's environment; confirm.
HIY_DATA_DIR="${HIY_DATA_DIR:-/data}"
# log: timestamped status line; die: log an ERROR line and abort with status 1.
log() { printf '[hiy-restore] %s %s\n' "$(date '+%H:%M:%S')" "$*"; }
die() {
  log "ERROR: $*"
  exit 1
}
# ── Validate ───────────────────────────────────────────────────────────────────
# Under set -e the `[ … ] && die` form is safe: the left side of && is exempt
# from errexit, so a passing check does not abort the script.
[ -z "${ARCHIVE}" ] && die "Usage: $0 <archive.tar.gz> [--force]"
[ -f "${ARCHIVE}" ] || die "Archive not found: ${ARCHIVE}"
# Extract into a private temp dir that is removed on every exit path.
WORK_DIR=$(mktemp -d)
trap 'rm -rf "${WORK_DIR}"' EXIT
log "=== HIY Restore ==="
log "Archive : ${ARCHIVE}"
log "Work dir: ${WORK_DIR}"
log "Extracting archive…"
tar -xzf "${ARCHIVE}" -C "${WORK_DIR}"
# ── Helper: find a running container by compose service label ──────────────────
# Print the name of the first running container whose compose service
# label matches $1 (empty output when no such container is up).
find_container() {
  local svc="$1"
  podman ps \
    --format '{{.Names}}' \
    --filter "label=com.docker.compose.service=${svc}" \
    | head -n 1
}
# ── 1. .env file ───────────────────────────────────────────────────────────────
# Restore the secrets file unless one already exists (unless --force was given).
log "--- .env ---"
if [ ! -f "${WORK_DIR}/dot-env" ]; then
  log "No .env in archive — skipping"
elif [ -f "${ENV_FILE}" ] && [ "${FORCE}" != "--force" ]; then
  log "SKIP: ${ENV_FILE} already exists (pass --force to overwrite)"
else
  cp "${WORK_DIR}/dot-env" "${ENV_FILE}"
  log "Restored .env to ${ENV_FILE}"
fi
# ── 2. SQLite ──────────────────────────────────────────────────────────────────
# Rebuild hiy.db from the SQL text dump; the previous DB is kept as hiy.db.bak.
log "--- SQLite ---"
if [ -f "${WORK_DIR}/hiy.sql" ]; then
  DB_PATH="${HIY_DATA_DIR}/hiy.db"
  mkdir -p "$(dirname "${DB_PATH}")"
  if [ -f "${DB_PATH}" ]; then
    log "Moving existing hiy.db to hiy.db.bak…"
    mv "${DB_PATH}" "${DB_PATH}.bak"
  fi
  # Remove stale WAL/SHM sidecars: leftovers from the old database could be
  # replayed into the freshly restored file and corrupt it.
  rm -f "${DB_PATH}-wal" "${DB_PATH}-shm"
  log "Restoring hiy.db…"
  sqlite3 "${DB_PATH}" < "${WORK_DIR}/hiy.sql"
  log "SQLite restored."
else
  log "No hiy.sql in archive — skipping"
fi
# ── 3. Env files & git repos ───────────────────────────────────────────────────
# Use -s (exists AND non-empty) rather than -f: backup.sh could leave a
# zero-byte tarball when the source directory was absent, and extracting an
# empty file would abort the whole restore under set -e.
mkdir -p "${HIY_DATA_DIR}"
log "--- Env files ---"
if [ -s "${WORK_DIR}/envs.tar.gz" ]; then
  log "Restoring envs/…"
  tar -xzf "${WORK_DIR}/envs.tar.gz" -C "${HIY_DATA_DIR}"
fi
log "--- Git repos ---"
if [ -s "${WORK_DIR}/repos.tar.gz" ]; then
  log "Restoring repos/…"
  tar -xzf "${WORK_DIR}/repos.tar.gz" -C "${HIY_DATA_DIR}"
fi
# ── 4. Postgres ────────────────────────────────────────────────────────────────
log "--- Postgres ---"
if [ -f "${WORK_DIR}/postgres.sql" ]; then
  PG_CTR=$(find_container postgres)
  if [ -n "${PG_CTR}" ]; then
    log "Restoring Postgres via container ${PG_CTR}"
    # Terminate active sessions on the target databases first so the restore
    # is not blocked by open connections; best-effort, hence the swallowed
    # exit status.
    podman exec -i "${PG_CTR}" psql -U hiy_admin -d postgres \
      -c "SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname IN ('hiy','forgejo') AND pid <> pg_backend_pid();" \
      > /dev/null 2>&1 || true
    podman exec -i "${PG_CTR}" psql -U hiy_admin -d postgres \
      < "${WORK_DIR}/postgres.sql"
    log "Postgres restored."
  else
    # WORK_DIR is deleted by the EXIT trap, so the old advice to re-run psql
    # against ${WORK_DIR}/postgres.sql pointed at a path that no longer
    # exists. Park the dump somewhere that survives this run instead.
    SAVED_SQL=$(mktemp -t hiy-postgres-restore.XXXXXX)
    cp "${WORK_DIR}/postgres.sql" "${SAVED_SQL}"
    log "WARNING: postgres container not running"
    log "  Dump preserved at ${SAVED_SQL} — start Postgres first, then run:"
    log "  podman exec -i <postgres_container> psql -U hiy_admin -d postgres < ${SAVED_SQL}"
  fi
else
  log "No postgres.sql in archive — skipping"
fi
# ── 5. Forgejo data volume ─────────────────────────────────────────────────────
# NOTE(review): `podman volume import` appears to overlay an existing volume's
# contents rather than wiping it — confirm against the podman docs.
log "--- Forgejo volume ---"
if [ ! -f "${WORK_DIR}/forgejo-data.tar" ]; then
  log "No forgejo-data.tar in archive — skipping"
else
  log "Importing forgejo-data volume…"
  # Create the volume first if it does not exist yet.
  if ! podman volume exists forgejo-data 2>/dev/null; then
    podman volume create forgejo-data
  fi
  podman volume import forgejo-data "${WORK_DIR}/forgejo-data.tar"
  log "forgejo-data restored."
fi
# ── 6. Caddy TLS certificates ──────────────────────────────────────────────────
log "--- Caddy volume ---"
if [ ! -f "${WORK_DIR}/caddy-data.tar" ]; then
  log "No caddy-data.tar in archive — skipping"
else
  log "Importing caddy-data volume…"
  # Create the volume first if it does not exist yet.
  if ! podman volume exists caddy-data 2>/dev/null; then
    podman volume create caddy-data
  fi
  podman volume import caddy-data "${WORK_DIR}/caddy-data.tar"
  log "caddy-data restored."
fi
# Final operator instructions.
log "=== Restore complete ==="
log "Bring the stack back up with:"
log " podman compose -f ${SCRIPT_DIR}/docker-compose.yml up -d"