harheimertc/deploy-production.sh

#!/bin/bash
set -euo pipefail
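# -e: abort on errors, -u: treat unset variables as errors, -o pipefail: a pipeline fails if any stage fails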
# Always work from the repository directory (important for backup/restore with relative paths)
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"
# Deployment script for the Harheimer TC website
# Backs up production data before the build and restores it afterwards
echo "=== Harheimer TC Deployment ==="
echo ""
echo "Working directory: $(pwd)"
echo ""
if ! git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
echo "ERROR: Dieses Script muss im Git-Repository ausgeführt werden (kein .git gefunden)."
exit 1
fi
# Optional (recommended): keep persistent data outside the git repo and link it in via symlinks.
# This reliably prevents git from ever overwriting production data.
DEPLOY_HOME="${DEPLOY_HOME:-${HOME:-/tmp}}"
DATA_ROOT="${DATA_ROOT:-$DEPLOY_HOME/harheimertc-data}"
BACKUP_ROOT="${BACKUP_ROOT:-$DEPLOY_HOME/harheimertc-backups}"
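# Example invocation with overrides (hypothetical paths):
#   DATA_ROOT=/var/lib/harheimertc/data BACKUP_ROOT=/var/backups/harheimertc ./deploy-production.sh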
mkdir -p "$DATA_ROOT" "$BACKUP_ROOT"
ensure_symlink_dir() {
local src="$1" # z.B. server/data
local target="$2" # z.B. /var/lib/harheimertc/server-data
mkdir -p "$(dirname "$src")"
mkdir -p "$target"
if [ -L "$src" ]; then
return 0
fi
if [ -d "$src" ]; then
echo " Moving $src -> $target (first-time migration)"
# Merge existing content into target
cp -a "$src/." "$target/" || true
rm -rf "$src"
fi
ln -s "$target" "$src"
echo " Linked $src -> $target"
}
has_tracked_files_under() {
  local prefix="$1" # e.g. public/data
  # If any file is tracked under this path, symlinking the directory will break git operations
  git ls-files "$prefix" | head -n 1 | grep -q .
}
echo "0. Ensuring persistent data directories (recommended)..."
ensure_symlink_dir "server/data" "$DATA_ROOT/server-data"
# IMPORTANT: Only symlink public/data if it's not tracked by git.
# Otherwise git will error with "path is beyond a symbolic link".
if has_tracked_files_under "public/data"; then
echo " Skipping symlink for public/data (tracked files detected in git)."
echo " Recommendation: remove public/data/*.csv from git history and keep them only as production data."
else
ensure_symlink_dir "public/data" "$DATA_ROOT/public-data"
fi
ensure_symlink_dir "public/uploads" "$DATA_ROOT/public-uploads"
echo ""
# 1. BACKUP: save live production data BEFORE any git operations
echo "1. Backing up current production data (pre-git)..."
# Human-readable timestamp (local time)
BACKUP_TS="$(date +"%Y-%m-%d_%H-%M-%S")"
BACKUP_DIR="$BACKUP_ROOT/backup_$BACKUP_TS"
mkdir -p "$BACKUP_DIR"
echo " Backup directory: $BACKUP_DIR"
# Always back up server data (JSON) and CSVs from the filesystem, never from a 'stash'
if [ -d server/data ]; then
  cp -a server/data "$BACKUP_DIR/server-data"
  echo " Backed up server/data -> $BACKUP_DIR/server-data"
else
  echo "ERROR: server/data does not exist. Aborting so we don't roll out repo defaults."
  exit 1
fi
if ls public/data/*.csv >/dev/null 2>&1; then
mkdir -p "$BACKUP_DIR/public-data"
cp -a public/data/*.csv "$BACKUP_DIR/public-data/"
echo " Backed up public/data/*.csv -> $BACKUP_DIR/public-data/"
else
echo " No public CSVs to backup (public/data/*.csv not found)"
fi
# 2. Handle local changes and git pull
echo "2. Handling local changes and pulling latest from git..."
# Check if there are merge conflicts first
if [ -n "$(git status --porcelain | grep '^UU\|^AA\|^DD')" ]; then
echo " Resolving existing merge conflicts..."
git reset --hard HEAD
fi
# Ensure a clean working tree (we avoid git stash because it breaks with symlinked data paths)
if [ -n "$(git status --porcelain)" ]; then
echo "ERROR: Working tree is not clean. Please commit/revert changes before deployment."
echo "Hint: If this is caused by tracked production data files, remove them from git tracking."
git status --porcelain
exit 1
fi
# Pull latest changes
echo " Pulling latest changes..."
git pull
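# Note: 'git pull --ff-only' would be a stricter alternative that refuses to create merge commits on the server.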
# Defensive reset: the stash-based flow is gone, so this should be a no-op on a clean tree
git reset --hard HEAD >/dev/null 2>&1
# 3. Install dependencies
echo ""
echo "3. Installing dependencies..."
npm install
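# Alternative (sketch): 'npm ci' gives a clean, lockfile-exact install, assuming package-lock.json is committed.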
# 4. Remove old build (but keep data!)
echo ""
echo "4. Removing old build output..."
rm -rf .output
# 5. Build
echo ""
echo "5. Building application..."
npm run build
# 6. Restore production data (overwrite repo defaults with the backup)
echo ""
echo "6. Restoring production data..."
# Fully restore server/data (incl. config.json, users.json, news.json, sessions.json, members.json, membership-applications)
if [ ! -d "$BACKUP_DIR/server-data" ]; then
echo "ERROR: Backup-Verzeichnis $BACKUP_DIR/server-data fehlt. Abbruch."
exit 1
fi
mkdir -p server/data
cp -a "$BACKUP_DIR/server-data/." server/data/
echo " Restored server/data from backup ($BACKUP_DIR/server-data)."
# Restore all CSVs
if ls "$BACKUP_DIR/public-data"/*.csv >/dev/null 2>&1; then
  mkdir -p public/data
  cp -a "$BACKUP_DIR/public-data"/*.csv public/data/
  echo " Restored public/data/*.csv from backup ($BACKUP_DIR/public-data)."
else
  echo " No public CSVs to restore"
fi
# Sanity check: users.json must exist and must not be empty
if [ ! -s server/data/users.json ]; then
echo "ERROR: server/data/users.json fehlt oder ist leer nach Restore. Abbruch."
exit 1
fi
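# Optional stricter check (sketch, assumes Node.js is on PATH):
#   node -e 'JSON.parse(require("fs").readFileSync("server/data/users.json","utf8"))'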
# 7. Cleanup (backups are intentionally kept)
echo ""
echo "7. Keeping backups in $BACKUP_ROOT (no git stash used)."
# 8. Restart PM2
echo ""
echo "8. Restarting PM2..."
pm2 restart harheimertc
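# Optional post-deploy check, e.g.:
#   pm2 status
#   pm2 logs harheimertc --lines 50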
echo ""
echo "=== Deployment completed successfully! ==="
echo "The application is now running with the latest code and your production data preserved."