#!/bin/bash
# ==============================================================================
# Provisioning Script - SuiteX Event Backbone
# V10 Compliant: 4 environment-suffixed Pub/Sub topics and 5 subscriptions
# per run (8 topics / 10 subscriptions once run for BOTH sandbox & production).
# ==============================================================================
# IMPORTANT: This script creates REAL billable resources in GCP.
# Run: ./provision_gcp_test_env.sh [sandbox|production]
# ==============================================================================

# Fail fast: -e stops on unhandled errors, -u rejects unset variables,
# and pipefail surfaces a failure from any stage of a pipeline.
set -euo pipefail

# --- ENVIRONMENT ---
# V10 P2.10: All topic names MUST use an environment suffix — never hardcoded.
# Usage: ./provision_gcp_test_env.sh sandbox
#        ./provision_gcp_test_env.sh production
ENV=${1:-"sandbox"}

if [[ "$ENV" != "sandbox" && "$ENV" != "production" ]]; then
    echo "❌ Invalid environment '$ENV'. Must be 'sandbox' or 'production'." >&2
    exit 1
fi

# Resolve the active project. Assign and export separately so a gcloud
# failure is not masked by 'export' (SC2155), then abort if no project is
# configured — otherwise every resource below would be created with an
# empty project segment in its name.
PROJECT_ID=$(gcloud config get-value project)
if [[ -z "$PROJECT_ID" || "$PROJECT_ID" == "(unset)" ]]; then
    echo "❌ No default GCP project configured. Run: gcloud config set project <PROJECT_ID>" >&2
    exit 1
fi
export PROJECT_ID
export REGION="us-central1"
export SERVICE_ACCOUNT_NAME="sync-worker-${ENV}"
export SQL_INSTANCE_NAME="suitex-evt-store-${ENV}"
# Production passwords use 16 bytes of CSPRNG entropy from openssl;
# sandbox falls back to $RANDOM, which is NOT cryptographically strong.
if [[ "$ENV" == "production" ]]; then
    export SQL_ROOT_PASS="R00t!$(openssl rand -hex 16)"
    export SQL_USER_PASS="W0rk!$(openssl rand -hex 16)"
else
    export SQL_ROOT_PASS="RootPass!2026_${RANDOM}"
    export SQL_USER_PASS="WorkerPass!2026_${RANDOM}"
fi

# Summarize the run configuration before creating anything billable.
if [[ "$ENV" == "production" ]]; then
    net_mode="PRIVATE"
    ha_mode="REGIONAL (HA)"
else
    net_mode="PUBLIC"
    ha_mode="ZONAL"
fi
printf '==========================================================\n'
printf '🚀 Starting Event Backbone Provisioning (Environment: %s)\n' "$ENV"
printf '📍 GCP Project: %s\n' "$PROJECT_ID"
printf '🌐 Network IP   : %s\n' "$net_mode"
printf '🏗️  Availability : %s\n' "$ha_mode"
printf '==========================================================\n'
printf '\n'

# 1. ENABLE REQUIRED APIS
echo "🛠️  1. Enabling required APIs (Pub/Sub, IAM, SQL, GCS)..."
# All required services enabled in a single call.
required_apis=(
    pubsub.googleapis.com
    iam.googleapis.com
    sqladmin.googleapis.com
    storage.googleapis.com
)
gcloud services enable "${required_apis[@]}"

# 2. CREATE SERVICE ACCOUNT (IAM)
echo "🔐 2. Configuring Service Account..."
# Idempotent: only create the account when 'describe' reports it absent.
SA_EMAIL="${SERVICE_ACCOUNT_NAME}@${PROJECT_ID}.iam.gserviceaccount.com"
if ! gcloud iam service-accounts describe "${SA_EMAIL}" >/dev/null 2>&1; then
    # Quoted expansion (SC2086): harmless with today's naming scheme, but
    # prevents word splitting/globbing if the name ever gains odd chars.
    gcloud iam service-accounts create "${SERVICE_ACCOUNT_NAME}" \
        --description="Event Backbone Worker ($ENV)" \
        --display-name="Sync Worker ($ENV)"
    echo "   ✅ Service account ${SERVICE_ACCOUNT_NAME} created."
else
    echo "   ⚡ Service account already exists. Skipping..."
fi

# Assign Roles
echo "   Assigning roles to service account..."
SA_MEMBER="serviceAccount:${SERVICE_ACCOUNT_NAME}@${PROJECT_ID}.iam.gserviceaccount.com"
for ROLE in "roles/pubsub.publisher" "roles/pubsub.subscriber" "roles/storage.objectAdmin"; do
    # Skip the binding when the (member, role) pair already exists.
    # Quoted expansions throughout (SC2086); 'grep -qF --' matches the role
    # as a literal string — the previous unescaped pattern let the dots in
    # "roles/pubsub.publisher" match any character.
    if ! gcloud projects get-iam-policy "${PROJECT_ID}" \
        --flatten="bindings[].members" \
        --format="value(bindings.role)" \
        --filter="bindings.members:${SA_MEMBER} AND bindings.role:${ROLE}" \
        2>/dev/null | grep -qF -- "${ROLE}"; then
        gcloud projects add-iam-policy-binding "${PROJECT_ID}" \
            --member="${SA_MEMBER}" \
            --role="${ROLE}" >/dev/null
        echo "     ✅ Role ${ROLE} assigned."
    else
        echo "     ⚡ Role ${ROLE} already assigned. Skipping..."
    fi
done
echo "   ✅ Roles configuration completed."

# 3. CREATE BUCKET (GCS)
BUCKET_NAME="suitex-payload-snapshots-${ENV}-${PROJECT_ID}"
echo "🪣  3. Creating GCS Bucket ($BUCKET_NAME)..."
if ! gcloud storage buckets describe "gs://${BUCKET_NAME}" >/dev/null 2>&1; then
    gcloud storage buckets create "gs://${BUCKET_NAME}" \
        --location="${REGION}" \
        --uniform-bucket-level-access

    # 30-day object-expiry lifecycle rule. Use mktemp instead of a fixed
    # ./lifecycle.json so an existing file in the CWD is never clobbered,
    # and clean up via trap so the temp file is removed even when the
    # 'buckets update' call fails under 'set -e'.
    LIFECYCLE_FILE=$(mktemp)
    trap 'rm -f -- "${LIFECYCLE_FILE}"' EXIT
    cat <<EOF > "${LIFECYCLE_FILE}"
{
  "rule": [
    {
      "action": {"type": "Delete"},
      "condition": {"age": 30}
    }
  ]
}
EOF
    gcloud storage buckets update "gs://${BUCKET_NAME}" --lifecycle-file="${LIFECYCLE_FILE}"
    rm -f -- "${LIFECYCLE_FILE}"
    trap - EXIT
    echo "   ✅ Bucket created with 30-day retention policy."
else
    echo "   ⚡ Bucket already exists. Skipping..."
fi


# 4. PUB/SUB — Full V10 Topology
# V10 Appendix E: 4 topic types × 2 environments = 8 topics total.
# V10 Appendix A5: 5 subscription types × 2 environments = 10 subscriptions total.
# V10 P2.10: environment suffix is MANDATORY — names are NEVER hardcoded.
echo "📨 4. Configuring Pub/Sub Topology (V10 Compliant)..."

create_topic() {
    local TOPIC=$1
    if ! gcloud pubsub topics describe "${TOPIC}" >/dev/null 2>&1; then
        gcloud pubsub topics create "${TOPIC}"
        echo "   ✅ Topic created: ${TOPIC}"
    else
        echo "   ⚡ Topic exists: ${TOPIC}"
    fi
}

# Idempotently ensure an ordered Pub/Sub subscription exists.
# $1 - subscription name
# $2 - topic the subscription attaches to
# $3 - optional extra gcloud flags as a whitespace-separated string
#      (e.g. "--dead-letter-topic=... --max-delivery-attempts=5")
create_subscription() {
    local SUB=$1
    local TOPIC=$2
    if ! gcloud pubsub subscriptions describe "${SUB}" >/dev/null 2>&1; then
        # Build the command as an array: the optional flags are split on
        # whitespace exactly like the previous unquoted ${EXTRA_FLAGS}
        # (SC2086), but array expansion cannot trigger pathname globbing.
        local -a cmd=(gcloud pubsub subscriptions create "${SUB}"
                      --topic="${TOPIC}"
                      --enable-message-ordering)
        if [[ -n "${3:-}" ]]; then
            local -a extra
            read -r -a extra <<< "$3"
            cmd+=("${extra[@]}")
        fi
        "${cmd[@]}"
        echo "   ✅ Subscription created: ${SUB} → ${TOPIC}"
    else
        echo "   ⚡ Subscription exists: ${SUB}"
    fi
}

# --- 4.1 Create all 8 topics (V10 Appendix E × 2 environments) ---
# Topic naming contract: events.{type}.{env}
# where {env} must be derived from config — never a hardcoded string.
# Same four topics in the same order as before, driven by a loop.
for topic_type in raw merged error dlq; do
    create_topic "events.${topic_type}.${ENV}"
done

# --- 4.2 Create all 10 subscriptions (V10 Appendix A5 × 2 environments) ---

# DLQ topic must exist before we create subscriptions that reference it as backstop.
# events.raw.{env} → Merge Service
create_subscription "events.raw.${ENV}-merge-sub" "events.raw.${ENV}"

# events.merged.{env} → NetSuite Writer
create_subscription "events.merged.${ENV}-netsuite-sub" "events.merged.${ENV}"

# events.merged.{env} → SuiteX Writer
create_subscription "events.merged.${ENV}-suitex-sub" "events.merged.${ENV}"

# events.error.{env} → Error Handler
# V10 Spec: Exponential backoff; routes to DLQ after 5 failed delivery attempts.
# NOTE: application-level retry counts per V10 O6 are enforced by the consumer:
#   transient=10 retries, 429=5, auth=3, validation/conflict=0 (immediate DLQ).
create_subscription "events.error.${ENV}-retry-sub" "events.error.${ENV}" \
    "--dead-letter-topic=events.dlq.${ENV} --max-delivery-attempts=5 --min-retry-delay=10s --max-retry-delay=600s"

# events.dlq.{env} → DLQ Handler (terminal — no retry)
create_subscription "events.dlq.${ENV}-sub" "events.dlq.${ENV}"


# 5. CREATE CLOUD SQL (LIGHTWEIGHT DATABASE FOR TESTING)
echo "🗄️  5. Provisioning Cloud SQL Instance (This will take ~5-10 minutes)..."
if ! gcloud sql instances describe "${SQL_INSTANCE_NAME}" >/dev/null 2>&1; then
    echo "   🕒 Grab a coffee, this will take a few minutes..."

    # 📝 COMPLIANCE FLAGS (Adjust based on Environment)
    # Collected in an array so multi-flag values are passed as separate
    # arguments without relying on unquoted word-splitting (SC2086).
    AVAILABILITY_TYPE="ZONAL"
    SQL_FLAGS=(--storage-auto-increase --backup --assign-ip) # Public IP by default for testing

    if [[ "$ENV" == "production" ]]; then
        AVAILABILITY_TYPE="REGIONAL" # High Availability (Requirement 1)
        SQL_FLAGS=(
            --storage-auto-increase
            --backup --enable-bin-log # Automated Backups + PITR (Requirement 1)
            # Requirement 2: Disable Public IP and use Private IP.
            # NOTE: This requires a pre-existing VPC peering (google-managed-services).
            --no-assign-ip --network="projects/${PROJECT_ID}/global/networks/default"
        )
    fi

    # NOTE(review): --require-ssl is deprecated in newer gcloud releases in
    # favor of --ssl-mode; confirm against the pinned CLI version.
    gcloud sql instances create "${SQL_INSTANCE_NAME}" \
        --database-version=MYSQL_8_0 \
        --tier=db-custom-1-3840 \
        --region="${REGION}" \
        --availability-type="${AVAILABILITY_TYPE}" \
        "${SQL_FLAGS[@]}" \
        --root-password="${SQL_ROOT_PASS}" \
        --require-ssl \
        --storage-type=SSD \
        --storage-size=10GB
    echo "   ✅ Cloud SQL instance created."
else
    echo "   ⚡ Instance already exists. Skipping creation..."
fi

# Create Internal Database (idempotent; quoted expansions per SC2086)
if ! gcloud sql databases describe events --instance="${SQL_INSTANCE_NAME}" >/dev/null 2>&1; then
    gcloud sql databases create events --instance="${SQL_INSTANCE_NAME}"
fi

# Create Laravel User (idempotent).
# NOTE(review): the password is passed on argv and may be visible in 'ps'
# on shared machines; acceptable for provisioning, but worth knowing.
if ! gcloud sql users describe sync_worker --host="%" --instance="${SQL_INSTANCE_NAME}" >/dev/null 2>&1; then
    gcloud sql users create sync_worker --host="%" --instance="${SQL_INSTANCE_NAME}" --password="${SQL_USER_PASS}"
    echo "   ✅ sync_worker user created successfully."
fi


# 6. SYNC_WORKER DB PERMISSIONS
# V10 DB-level immutability: sync_worker may only SELECT and INSERT.
# No UPDATE or DELETE privileges are granted to this user.
echo "🔒 6. Documenting sync_worker privilege grant..."
# Quoted 'EOF' delimiter: the SQL below is emitted verbatim with no
# parameter expansion (nothing expandable is present, but this makes
# the intent explicit and future-proofs the snippet).
cat <<'EOF'

   -- Run this against the 'events' database after migrating:
   GRANT SELECT, INSERT ON events.events          TO 'sync_worker'@'%';
   GRANT SELECT, INSERT ON events.event_audit_log TO 'sync_worker'@'%';
   FLUSH PRIVILEGES;
   -- Verify: attempting UPDATE or DELETE as sync_worker must return access-denied.
EOF

# 7. CREDENTIALS SUMMARY
echo "=========================================================="
echo "🎉 PROVISIONING COMPLETED (ENV: $ENV) 🎉"
echo "=========================================================="
echo ""
echo "--- LARAVEL .ENV ---"
echo "DB_EVENT_STORE_CONNECTION=mysql"
# NOTE(review): ipAddresses[0] is assumed to be the reachable address; for
# production (--no-assign-ip) confirm it resolves to the PRIVATE IP.
echo "DB_EVENT_STORE_HOST=$(gcloud sql instances describe "${SQL_INSTANCE_NAME}" --format='value(ipAddresses[0].ipAddress)')"
echo "DB_EVENT_STORE_PORT=3306"
echo "DB_EVENT_STORE_DATABASE=events"
echo "DB_EVENT_STORE_USERNAME=sync_worker"
echo "DB_EVENT_STORE_PASSWORD=${SQL_USER_PASS}"
echo "GCP_PUBSUB_ENVIRONMENT=${ENV}"
echo ""
# A single run provisions ONE environment (4 topics / 5 subscriptions);
# the V10 totals of 8/10 are reached only after running the script for
# both sandbox AND production. The old banner claimed 8/10 per run.
echo "--- PUBSUB TOPOLOGY (V10 Compliant: 4 topics / 5 subscriptions for '${ENV}'; 8/10 across both environments) ---"
echo "Topics:"
for TYPE in raw merged error dlq; do
    echo "  - events.${TYPE}.${ENV}"
done
echo "Subscriptions:"
echo "  - events.raw.${ENV}-merge-sub          → Merge Service"
echo "  - events.merged.${ENV}-netsuite-sub    → NetSuite Writer"
echo "  - events.merged.${ENV}-suitex-sub      → SuiteX Writer"
echo "  - events.error.${ENV}-retry-sub        → Error Handler (DLQ backstop after 5 attempts)"
echo "  - events.dlq.${ENV}-sub                → DLQ Handler (terminal)"
echo ""
echo "⚠️  Publisher Ordering Key Contract (V10 P2.10):"
echo "   All publishers to events.raw.{env} and events.merged.{env}"
echo "   MUST set orderingKey = '{recordType}:{recordId}' (e.g., project:12345)"
echo "   Topic names MUST be derived from config('services.gcp.environment') — never hardcoded."
echo "=========================================================="
