#!/bin/bash
#
# sync-from-prod.sh - Sync published content from production to local dev
#
# Pulls content tables + uploads from prod, runs URL replacement,
# and rebuilds caches. Safe: does not touch wp_options, wp_users,
# plugin tables, or local-only data.
#
# Usage: ./dev/scripts/sync/sync-from-prod.sh
#

set -euo pipefail

# ---------------------------------------------------------------------------
# Configuration
# ---------------------------------------------------------------------------
# Resolve all paths relative to this script so it works from any CWD.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Three levels up from dev/scripts/sync/ → repository root.
PROJECT_DIR="$(cd "$SCRIPT_DIR/../../.." && pwd)"
# tmp/ holds the gzipped SQL dumps; logs/ the append-only sync log.
TMP_DIR="$SCRIPT_DIR/tmp"
LOG_DIR="$SCRIPT_DIR/logs"
LOG_FILE="$LOG_DIR/sync.log"
LOCK_FILE="$SCRIPT_DIR/.sync.lock"
# Used to build a unique per-run dump filename (see DUMP_FILE below).
TIMESTAMP=$(date +"%Y%m%d_%H%M%S")

# Tables to sync — content + taxonomy + custom content pipeline tables.
# Deliberately excludes wp_options, wp_users, and plugin tables (see header).
SYNC_TABLES=(
    wp_posts
    wp_postmeta
    wp_terms
    wp_termmeta
    wp_term_relationships
    wp_term_taxonomy
    parketry_articles
    parketry_published_content
)

# Tables that contain URL strings (for search-replace).
# Kept as a single space-separated string because it is intentionally
# word-split into one argument per table where it is used.
URL_TABLES="wp_posts wp_postmeta wp_terms wp_term_taxonomy"

# ---------------------------------------------------------------------------
# Functions
# ---------------------------------------------------------------------------
log() {
    # Emit a timestamped, tagged line to stdout and append it to $LOG_FILE.
    local stamp
    stamp=$(date '+%Y-%m-%d %H:%M:%S')
    printf '[%s] [SYNC] %s\n' "$stamp" "$1" | tee -a "$LOG_FILE"
}

error_exit() {
    # Log a fatal error message and abort the whole script with status 1.
    local msg="ERROR: $1"
    log "$msg"
    exit 1
}

cleanup() {
    # EXIT-trap handler: release the lock so later runs are not blocked.
    rm -f -- "$LOCK_FILE"
}

human_size() {
    # Print the human-readable size(s) of the given path (du -h);
    # silent and status 0 if the path is missing.
    du -h -- "$1" 2>/dev/null | awk '{ print $1 }'
}

# ---------------------------------------------------------------------------
# Setup
# ---------------------------------------------------------------------------
mkdir -p "$TMP_DIR" "$LOG_DIR"

# Lock file — prevent concurrent runs.
# noclobber makes create-if-absent atomic, closing the race where two syncs
# both pass a bare existence check and then both write the lock file.
if ! ( set -o noclobber; echo $$ > "$LOCK_FILE" ) 2>/dev/null; then
    LOCK_PID=$(cat "$LOCK_FILE" 2>/dev/null || echo "")
    if [[ -n "$LOCK_PID" ]] && kill -0 "$LOCK_PID" 2>/dev/null; then
        log "ERROR: Another sync is already running (PID: $LOCK_PID)"
        exit 1
    fi
    log "WARNING: Stale lock file found, removing..."
    rm -f "$LOCK_FILE"
    echo $$ > "$LOCK_FILE"
fi
trap cleanup EXIT
# Turn INT/TERM into a normal exit so the EXIT trap (and thus the lock
# cleanup) also fires on Ctrl-C; bash would otherwise die without running it.
trap 'exit 1' INT TERM

START_TIME=$(date +%s)

log "=========================================="
log "Starting production → local sync"
log "=========================================="

# ---------------------------------------------------------------------------
# 1. Load config
# ---------------------------------------------------------------------------
CONFIG_FILE="$SCRIPT_DIR/sync-config.sh"
[[ -f "$CONFIG_FILE" ]] || error_exit "Config not found: $CONFIG_FILE (copy sync-config.sh.example)"
# shellcheck source=/dev/null
source "$CONFIG_FILE"

# Fail fast if any required setting is missing (checked via ${!var} indirection).
REQUIRED_VARS=(
    PROD_SSH_ALIAS PROD_DB_NAME PROD_DB_USER PROD_DB_PASSWORD
    PROD_UPLOADS_PATH PROD_DOMAIN LOCAL_DOMAIN
    LOCAL_CONTAINER_DB LOCAL_CONTAINER_WP LOCAL_DB_NAME
)
for var in "${REQUIRED_VARS[@]}"; do
    [[ -n "${!var:-}" ]] || error_exit "Required variable $var is not set in sync-config.sh"
done

log "Config loaded: ${PROD_DOMAIN} → ${LOCAL_DOMAIN}"

# ---------------------------------------------------------------------------
# 2. Pre-flight checks
# ---------------------------------------------------------------------------
log "Running pre-flight checks..."

# Check local containers. grep -Fxq: fixed-string, whole-line match, so a
# container name containing regex metacharacters (e.g. '.') cannot
# false-match against a similarly named container; '--' guards against
# names starting with '-'.
for container in "$LOCAL_CONTAINER_DB" "$LOCAL_CONTAINER_WP"; do
    if ! docker ps --format '{{.Names}}' | grep -Fxq -- "$container"; then
        error_exit "Container $container is not running"
    fi
done
log "  Local containers: OK"

# Check SSH connectivity (BatchMode: fail fast instead of prompting)
if ! ssh -q -o BatchMode=yes -o ConnectTimeout=10 "$PROD_SSH_ALIAS" exit 2>/dev/null; then
    error_exit "Cannot connect to $PROD_SSH_ALIAS via SSH"
fi
log "  SSH to $PROD_SSH_ALIAS: OK"

# Get local DB credentials from .env
# NOTE(review): sourcing .env executes it and may overwrite variables set by
# sync-config.sh — keep the two files' variable names disjoint.
if [[ -f "$PROJECT_DIR/.env" ]]; then
    # shellcheck source=/dev/null
    source "$PROJECT_DIR/.env"
else
    error_exit ".env file not found at $PROJECT_DIR/.env"
fi

# Prefer root credentials; fall back to the app user if root is not set.
LOCAL_DB_PASSWORD="${MARIADB_ROOT_PASSWORD:-}"
LOCAL_DB_USER="root"
if [[ -z "$LOCAL_DB_PASSWORD" ]]; then
    LOCAL_DB_PASSWORD="${MARIADB_PASSWORD:-}"
    LOCAL_DB_USER="${MARIADB_USER:-wordpress}"
    if [[ -z "$LOCAL_DB_PASSWORD" ]]; then
        error_exit "Neither MARIADB_ROOT_PASSWORD nor MARIADB_PASSWORD set in .env"
    fi
fi
log "  Local DB credentials: OK"

# ---------------------------------------------------------------------------
# 3. Database sync
# ---------------------------------------------------------------------------
TABLES_STR="${SYNC_TABLES[*]}"
DUMP_FILE="$TMP_DIR/prod_sync_${TIMESTAMP}.sql.gz"

log "Dumping ${#SYNC_TABLES[@]} tables from production..."
log "  Tables: $TABLES_STR"

# --replace + --no-create-info: emit REPLACE INTO rows only, so the local
# schema and any local-only rows absent from the dump are left untouched.
# mysqldump/ssh stderr goes to the sync log (it used to be discarded, which
# made dump failures undiagnosable); pipefail still aborts on failure.
# NOTE(review): the DB password appears on the remote command line and is
# visible in `ps` on the prod host — a ~/.my.cnf there would avoid that.
ssh "$PROD_SSH_ALIAS" "mysqldump \
    --single-transaction \
    --quick \
    --replace \
    --complete-insert \
    --no-create-info \
    -u '${PROD_DB_USER}' \
    -p'${PROD_DB_PASSWORD}' \
    '${PROD_DB_NAME}' \
    ${TABLES_STR}" 2>>"$LOG_FILE" | gzip > "$DUMP_FILE"

# Validate dump
if [[ ! -f "$DUMP_FILE" ]]; then
    error_exit "Dump file was not created"
fi

# BSD stat (-f%z) first, GNU stat (-c%s) as fallback; default to 0 so the
# numeric comparison below cannot blow up on an empty value under set -eu.
DUMP_SIZE=$(stat -f%z "$DUMP_FILE" 2>/dev/null || stat -c%s "$DUMP_FILE" 2>/dev/null || echo 0)
if [[ "${DUMP_SIZE:-0}" -lt 1000 ]]; then
    rm -f "$DUMP_FILE"
    error_exit "Dump file is too small (${DUMP_SIZE} bytes), likely failed"
fi

log "  Dump created: $(human_size "$DUMP_FILE")"

# Import into local (streamed, never unpacked to disk)
log "Importing into local database..."
gunzip -c "$DUMP_FILE" | docker exec -i "$LOCAL_CONTAINER_DB" \
    mariadb -u "$LOCAL_DB_USER" -p"$LOCAL_DB_PASSWORD" "$LOCAL_DB_NAME"

log "  Database import: OK"

# ---------------------------------------------------------------------------
# 4. URL search-replace
# ---------------------------------------------------------------------------
log "Running URL search-replace: ${PROD_DOMAIN} → ${LOCAL_DOMAIN}..."

# $URL_TABLES is intentionally unquoted: it is a space-separated list that
# must word-split into one argument per table name.
# --skip-columns=guid: the guid column is deliberately left untouched
# (standard WP practice — GUIDs act as permanent identifiers).
# If `wp` fails, pipefail propagates the status through the logging
# pipeline and set -e aborts the script.
# shellcheck disable=SC2086
docker exec "$LOCAL_CONTAINER_WP" wp search-replace \
    "$PROD_DOMAIN" "$LOCAL_DOMAIN" \
    $URL_TABLES \
    --precise \
    --recurse-objects \
    --skip-columns=guid \
    --allow-root 2>&1 | while IFS= read -r line; do
    log "  $line"
done

log "  URL replacement: OK"

# ---------------------------------------------------------------------------
# 5. rsync uploads
# ---------------------------------------------------------------------------
LOCAL_UPLOADS="$PROJECT_DIR/wordpress/wp-content/uploads/"
log "Syncing uploads via rsync..."

# "${PROD_UPLOADS_PATH%/}/" normalizes the remote path to exactly one
# trailing slash: rsync then copies the directory's CONTENTS into
# $LOCAL_UPLOADS. Without the slash rsync would nest the remote dir itself
# (uploads/uploads/) whenever the configured path happens to lack it.
# Only the last 3 lines of rsync output (the transfer summary) are logged.
rsync -avz --progress \
    -e "ssh" \
    "${PROD_SSH_ALIAS}:${PROD_UPLOADS_PATH%/}/" \
    "$LOCAL_UPLOADS" 2>&1 | tail -3 | while IFS= read -r line; do
    log "  $line"
done

log "  Uploads sync: OK"

# ---------------------------------------------------------------------------
# 6. Post-sync tasks
# ---------------------------------------------------------------------------
log "Flushing rewrite rules..."
# 2>/dev/null hides wp-cli noise; under set -e a real failure still aborts
# the script, but silently — re-run without the redirect when debugging.
docker exec "$LOCAL_CONTAINER_WP" wp rewrite flush --allow-root 2>/dev/null
log "  Rewrite flush: OK"

log "Rebuilding caches..."

# NOTE(review): the /var/www/project/... path is hardcoded — confirm it
# matches the WP container's mount layout. Only the last 5 output lines
# are logged; a script failure propagates via pipefail.
docker exec "$LOCAL_CONTAINER_WP" \
    php /var/www/project/dev/cronjobs/posts/cache_post_data_init.php 2>&1 | tail -5 | while IFS= read -r line; do
    log "  $line"
done
log "  Cache rebuild: OK"

log "Flushing Redis..."
# `wp cache flush` clears the WP object cache (Redis, per the log label).
docker exec "$LOCAL_CONTAINER_WP" wp cache flush --allow-root 2>/dev/null
log "  Redis flush: OK"

# ---------------------------------------------------------------------------
# 7. Cleanup old dumps (>7 days)
# ---------------------------------------------------------------------------
# -delete -print emits one line per removed file; count them to report.
CLEANED=$(find "$TMP_DIR" -name "prod_sync_*.sql.gz" -mtime +7 -delete -print 2>/dev/null | wc -l | tr -d ' ')
if (( CLEANED > 0 )); then
    log "Cleaned up $CLEANED old dump(s)"
fi

# ---------------------------------------------------------------------------
# Summary
# ---------------------------------------------------------------------------
END_TIME=$(date +%s)
DURATION=$(( END_TIME - START_TIME ))
# Do NOT name these SECONDS: that is bash's built-in auto-incrementing
# timer, so its value can drift between assignment and the log line below.
ELAPSED_MIN=$(( DURATION / 60 ))
ELAPSED_SEC=$(( DURATION % 60 ))

# Compute first, then default: the old `du | cut ... || echo unknown` form
# could yield an empty string instead of "unknown" when du failed.
UPLOADS_SIZE=$(du -sh "$LOCAL_UPLOADS" 2>/dev/null | cut -f1 || true)
UPLOADS_SIZE=${UPLOADS_SIZE:-unknown}

log "=========================================="
log "Sync completed successfully"
log "  Duration: ${ELAPSED_MIN}m ${ELAPSED_SEC}s"
log "  DB dump: $(human_size "$DUMP_FILE")"
log "  Local uploads: $UPLOADS_SIZE"
log "  Tables synced: ${#SYNC_TABLES[@]}"
log "=========================================="

exit 0
