Testing: - Add pytest configuration (pytest.ini) - Add test fixtures (tests/conftest.py) - Add ContentGenerator tests (13 tests) - Add ContentScheduler tests (16 tests) - Add PublisherManager tests (16 tests) - All 45 tests passing Production Docker: - Add docker-compose.prod.yml with healthchecks, resource limits - Add Dockerfile.prod with multi-stage build, non-root user - Add nginx.prod.conf with SSL, rate limiting, security headers - Add .env.prod.example template Maintenance Scripts: - Add backup.sh for database and media backups - Add restore.sh for database restoration - Add cleanup.sh for log rotation and Docker cleanup - Add healthcheck.sh with Telegram alerts Documentation: - Add DEPLOY.md with complete deployment guide Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
122 lines
4.0 KiB
Bash
Executable File
122 lines
4.0 KiB
Bash
Executable File
#!/bin/bash
# ===========================================
# Backup Script for Social Media Automation
#
# Dumps the PostgreSQL database from the docker container, archives
# uploaded media files, and prunes backups older than RETENTION_DAYS.
#
# Run daily via cron:
# 0 2 * * * /path/to/backup.sh >> /var/log/backup.log 2>&1
# ===========================================

# Strict mode: exit on errors (-e) and unset variables (-u), and make a
# pipeline fail if ANY stage fails (pipefail). Without pipefail the later
# `pg_dump | gzip` would report gzip's (successful) status and a failed
# dump would be silently written out as a "successful" backup.
set -euo pipefail

# Configuration (each value is overridable via the environment)
BACKUP_DIR="${BACKUP_DIR:-/root/Facebook-X-Threads-Automation/backups}"   # root dir for backups
RETENTION_DAYS="${RETENTION_DAYS:-7}"                                     # days to keep old backups
TIMESTAMP=$(date +%Y%m%d_%H%M%S)                                          # filename suffix for this run
CONTAINER_NAME="${CONTAINER_NAME:-social-automation-db}"                  # PostgreSQL docker container

# ANSI colors for log output (interpreted by log()'s escape handling)
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'   # reset
|
|
|
|
# Emit one timestamped log line to stdout; backslash escape sequences
# (e.g. the ANSI color variables) in the message are interpreted.
log() {
    local stamp
    stamp=$(date '+%Y-%m-%d %H:%M:%S')
    printf '%b\n' "[${stamp}] $1"
}
|
|
|
|
# Log a red ERROR message and abort the whole script with status 1.
error() {
    local msg="${RED}ERROR: $1${NC}"
    log "$msg"
    exit 1
}
|
|
|
|
# Log a message in green to mark a successfully completed step.
success() {
    local msg="$1"
    log "${GREEN}${msg}${NC}"
}
|
|
|
|
# Log a non-fatal warning in yellow; execution continues.
warning() {
    local msg="$1"
    log "${YELLOW}${msg}${NC}"
}
|
|
|
|
# Ensure both backup target directories exist before any step runs
mkdir -p "$BACKUP_DIR/database" "$BACKUP_DIR/media"

log "Starting backup process..."
|
|
|
|
# ===========================================
# 1. DATABASE BACKUP
# ===========================================
log "Backing up PostgreSQL database..."

DB_BACKUP_FILE="$BACKUP_DIR/database/db_backup_$TIMESTAMP.sql.gz"

# Check if container is running. -F -x matches the name as a fixed string
# against the whole line, so dots or other regex metacharacters in the
# container name cannot cause a false positive.
if ! docker ps --format '{{.Names}}' | grep -Fxq "$CONTAINER_NAME"; then
    error "Database container '$CONTAINER_NAME' is not running"
fi

# Get database credentials from the container; fall back to project defaults
# if the env vars are not set inside it (quotes prevent word-splitting)
POSTGRES_USER=$(docker exec "$CONTAINER_NAME" printenv POSTGRES_USER 2>/dev/null || echo "social_user")
POSTGRES_DB=$(docker exec "$CONTAINER_NAME" printenv POSTGRES_DB 2>/dev/null || echo "social_automation")

# Perform backup: stream a compressed SQL dump out of the container
if docker exec "$CONTAINER_NAME" pg_dump -U "$POSTGRES_USER" "$POSTGRES_DB" | gzip > "$DB_BACKUP_FILE"; then
    DB_SIZE=$(du -h "$DB_BACKUP_FILE" | cut -f1)
    success "Database backup completed: $DB_BACKUP_FILE ($DB_SIZE)"
else
    error "Database backup failed"
fi
|
|
|
|
# ===========================================
# 2. MEDIA FILES BACKUP
# ===========================================
log "Backing up media files..."

MEDIA_BACKUP_FILE="$BACKUP_DIR/media/media_backup_$TIMESTAMP.tar.gz"
UPLOADS_DIR="/root/Facebook-X-Threads-Automation/uploads"

# Only archive when the uploads directory exists and is non-empty
# (all expansions quoted to survive paths with spaces)
if [ -d "$UPLOADS_DIR" ] && [ -n "$(ls -A "$UPLOADS_DIR" 2>/dev/null)" ]; then
    # -C makes archive paths relative to the uploads dir's parent
    if tar -czf "$MEDIA_BACKUP_FILE" -C "$(dirname "$UPLOADS_DIR")" "$(basename "$UPLOADS_DIR")"; then
        MEDIA_SIZE=$(du -h "$MEDIA_BACKUP_FILE" | cut -f1)
        success "Media backup completed: $MEDIA_BACKUP_FILE ($MEDIA_SIZE)"
    else
        warning "Media backup failed or partially completed"
    fi
else
    warning "No media files to backup"
fi
|
|
|
|
# ===========================================
# 3. CLEANUP OLD BACKUPS
# ===========================================
log "Cleaning up backups older than $RETENTION_DAYS days..."

# Count files before cleanup
DB_BEFORE=$(find "$BACKUP_DIR/database" -name "*.sql.gz" -type f 2>/dev/null | wc -l)
MEDIA_BEFORE=$(find "$BACKUP_DIR/media" -name "*.tar.gz" -type f 2>/dev/null | wc -l)

# Delete files older than the retention window. The trailing `|| true`
# is deliberate: a missing directory or a file vanishing mid-scan must
# not abort the script under `set -e`.
find "$BACKUP_DIR/database" -name "*.sql.gz" -type f -mtime +"$RETENTION_DAYS" -delete 2>/dev/null || true
find "$BACKUP_DIR/media" -name "*.tar.gz" -type f -mtime +"$RETENTION_DAYS" -delete 2>/dev/null || true

# Count files after cleanup
DB_AFTER=$(find "$BACKUP_DIR/database" -name "*.sql.gz" -type f 2>/dev/null | wc -l)
MEDIA_AFTER=$(find "$BACKUP_DIR/media" -name "*.tar.gz" -type f 2>/dev/null | wc -l)

# Deleted = before - after (counts are whitespace-safe inside $(( )))
DB_DELETED=$((DB_BEFORE - DB_AFTER))
MEDIA_DELETED=$((MEDIA_BEFORE - MEDIA_AFTER))

if [ "$DB_DELETED" -gt 0 ] || [ "$MEDIA_DELETED" -gt 0 ]; then
    log "Deleted $DB_DELETED database backup(s) and $MEDIA_DELETED media backup(s)"
fi
|
|
|
|
# ===========================================
# 4. SUMMARY
# ===========================================
log "─────────────────────────────────────────"
log "Backup Summary:"
log "  Database backups: $DB_AFTER"
log "  Media backups: $MEDIA_AFTER"
# Quote $BACKUP_DIR so `du` works even if the path contains spaces
log "  Total size: $(du -sh "$BACKUP_DIR" | cut -f1)"
log "─────────────────────────────────────────"

success "Backup process completed successfully!"
|