# ============================================================================= # GitHub Actions - Tareas de Mantenimiento # Fase 7.4 - Go Live y Soporte # ============================================================================= # # Este workflow ejecuta tareas de mantenimiento programadas: # - Backup de base de datos # - Limpieza de logs # - Verificaciรณn de dependencias # - Escaneo de seguridad # ============================================================================= name: Maintenance Tasks on: schedule: # Ejecutar diariamente a las 3 AM UTC - cron: '0 3 * * *' workflow_dispatch: inputs: task: description: 'Tarea a ejecutar' required: true default: 'backup' type: choice options: - backup - cleanup - security-scan - all env: NODE_VERSION: '20' jobs: # =========================================================================== # Job 1: Database Backup # =========================================================================== backup: name: ๐Ÿ’พ Database Backup runs-on: ubuntu-latest if: github.event.schedule || github.event.inputs.task == 'backup' || github.event.inputs.task == 'all' environment: production steps: - name: ๐Ÿ“ฅ Checkout code uses: actions/checkout@v4 - name: ๐Ÿ” Setup SSH uses: webfactory/ssh-agent@v0.8.0 with: ssh-private-key: ${{ secrets.SSH_PRIVATE_KEY }} - name: ๐Ÿ’พ Run Backup env: SERVER_HOST: ${{ secrets.SERVER_HOST }} SERVER_USER: ${{ secrets.SERVER_USER }} run: | mkdir -p ~/.ssh ssh-keyscan -H $SERVER_HOST >> ~/.ssh/known_hosts ssh $SERVER_USER@$SERVER_HOST << 'EOF' cd ~/padel-prod # Ejecutar backup docker-compose -f docker-compose.prod.yml exec -T postgres \ pg_dump -U padeluser padeldb | gzip > backups/backup-$(date +%Y%m%d-%H%M%S).sql.gz # Limpiar backups antiguos (mantener 30 dรญas) find backups -name "backup-*.sql.gz" -type f -mtime +30 -delete echo "Backup completed!" ls -lh backups/ EOF - name: โ˜๏ธ Upload to S3 if: env.AWS_ACCESS_KEY_ID != '' env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }} SERVER_HOST: ${{ secrets.SERVER_HOST }} SERVER_USER: ${{ secrets.SERVER_USER }} run: | ssh $SERVER_USER@$SERVER_HOST << 'EOF' cd ~/padel-prod/backups # Subir รบltimo backup a S3 LATEST=$(ls -t backup-*.sql.gz | head -1) aws s3 cp "$LATEST" s3://${{ secrets.S3_BACKUP_BUCKET }}/backups/ \ --storage-class STANDARD_IA echo "Backup uploaded to S3: $LATEST" EOF continue-on-error: true - name: ๐Ÿ”” Notify if: always() uses: 8398a7/action-slack@v3 with: status: ${{ job.status }} channel: '#maintenance' text: 'Database backup ${{ job.status }}' env: SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} continue-on-error: true # =========================================================================== # Job 2: Cleanup Logs and Temp Files # =========================================================================== cleanup: name: ๐Ÿงน Cleanup runs-on: ubuntu-latest if: github.event.schedule || github.event.inputs.task == 'cleanup' || github.event.inputs.task == 'all' environment: production steps: - name: ๐Ÿ” Setup SSH uses: webfactory/ssh-agent@v0.8.0 with: ssh-private-key: ${{ secrets.SSH_PRIVATE_KEY }} - name: ๐Ÿงน Run Cleanup env: SERVER_HOST: ${{ secrets.SERVER_HOST }} SERVER_USER: ${{ secrets.SERVER_USER }} run: | mkdir -p ~/.ssh ssh-keyscan -H $SERVER_HOST >> ~/.ssh/known_hosts ssh $SERVER_USER@$SERVER_HOST << 'EOF' cd ~/padel-prod # Limpiar logs antiguos docker-compose -f docker-compose.prod.yml exec -T app \ node dist/scripts/cleanup-logs.js || true # Limpiar Docker docker system prune -f --volumes docker volume prune -f # Limpiar archivos temporales sudo find /tmp -type f -atime +7 -delete 2>/dev/null || true echo "Cleanup completed!" df -h EOF # =========================================================================== # Job 3: Security Scan # =========================================================================== security-scan: name: ๐Ÿ”’ Security Scan runs-on: ubuntu-latest if: github.event.schedule || github.event.inputs.task == 'security-scan' || github.event.inputs.task == 'all' steps: - name: ๐Ÿ“ฅ Checkout code uses: actions/checkout@v4 - name: โš™๏ธ Setup Node.js uses: actions/setup-node@v4 with: node-version: ${{ env.NODE_VERSION }} cache: 'npm' cache-dependency-path: './backend/package-lock.json' - name: ๐Ÿ“ฆ Install dependencies working-directory: ./backend run: npm ci - name: ๐Ÿ” Run npm audit working-directory: ./backend run: npm audit --audit-level=high continue-on-error: true - name: ๐Ÿณ Run Trivy vulnerability scanner uses: aquasecurity/trivy-action@master with: image-ref: ghcr.io/${{ github.repository }}:latest format: 'sarif' output: 'trivy-results.sarif' continue-on-error: true - name: ๐Ÿ“ค Upload scan results uses: github/codeql-action/upload-sarif@v2 if: always() with: sarif_file: 'trivy-results.sarif' continue-on-error: true - name: ๐Ÿ”” Notify if: always() uses: 8398a7/action-slack@v3 with: status: ${{ job.status }} channel: '#security' text: 'Security scan ${{ job.status }}' env: SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} continue-on-error: true # =========================================================================== # Job 4: Health Check # =========================================================================== health-check: name: ๐Ÿฅ Health Check runs-on: ubuntu-latest if: github.event.schedule environment: production steps: - name: ๐Ÿฅ Check API Health run: | curl -sf https://api.tudominio.com/api/v1/health || exit 1 echo "API is healthy!" - name: ๐Ÿ“Š Get Metrics run: | curl -sf https://api.tudominio.com/api/v1/health/metrics | jq '.' continue-on-error: true - name: ๐Ÿ”” Notify on Failure if: failure() uses: 8398a7/action-slack@v3 with: status: ${{ job.status }} channel: '#alerts' text: '๐Ÿšจ Health check FAILED for production API!' env: SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}