feat: Add Analytics, Odoo Integration, A/B Testing, and Content features
Phase 1 - Analytics y Reportes: - PostMetrics and AnalyticsReport models for tracking engagement - Analytics service with dashboard stats, top posts, optimal times - 8 API endpoints at /api/analytics/* - Interactive dashboard with Chart.js charts - Celery tasks for metrics fetch (15min) and weekly reports Phase 2 - Integración Odoo: - Lead and OdooSyncLog models for CRM integration - Odoo fields added to Product and Service models - XML-RPC service for bidirectional sync - Lead management API at /api/leads/* - Leads dashboard template - Celery tasks for product/service sync and lead export Phase 3 - A/B Testing y Recycling: - ABTest, ABTestVariant, RecycledPost models - Statistical winner analysis using chi-square test - Content recycling with engagement-based scoring - APIs at /api/ab-tests/* and /api/recycling/* - Automated test evaluation and content recycling tasks Phase 4 - Thread Series y Templates: - ThreadSeries and ThreadPost models for multi-post threads - AI-powered thread generation - Enhanced ImageTemplate with HTML template support - APIs at /api/threads/* and /api/templates/* - Thread scheduling with reply chain support Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
@@ -566,6 +566,467 @@ def send_daily_summary():
|
||||
db.close()
|
||||
|
||||
|
||||
# ============================================================
|
||||
# THREAD SERIES TASKS
|
||||
# ============================================================
|
||||
|
||||
@shared_task
def check_thread_schedules():
    """
    Check for thread posts that need to be published.

    Runs every minute via Celery Beat. Picks up ThreadPost rows in status
    "scheduled" whose scheduled_at falls within the last one-minute window
    and queues the underlying Post for publishing via publish_post.

    Returns:
        dict: {"success": True, "published": <queued count>} or
              {"success": False, "error": <message>} on failure.
    """
    logger.info("Checking thread schedules...")

    db = SessionLocal()

    try:
        from app.models.thread_series import ThreadSeries, ThreadPost

        now = datetime.utcnow()
        window_start = now - timedelta(minutes=1)

        # Find thread posts scheduled for now.
        # NOTE(review): status stays "scheduled" after queuing (it is updated
        # later by update_thread_post_status), so this narrow one-minute
        # window appears to be the only guard against re-queuing the same
        # post on the next Beat tick. It also means a post whose slot falls
        # in a window where Beat did not run (worker downtime) is never
        # retried — confirm this trade-off before widening the window.
        scheduled_posts = db.query(ThreadPost).filter(
            ThreadPost.status == "scheduled",
            ThreadPost.scheduled_at <= now,
            ThreadPost.scheduled_at > window_start
        ).all()

        published = 0

        for thread_post in scheduled_posts:
            try:
                # Queue the publish task for the actual post; thread posts
                # without a linked Post are silently skipped.
                if thread_post.post_id:
                    publish_post.delay(thread_post.post_id)
                    published += 1
                    logger.info(f"Queued thread post {thread_post.id} (post {thread_post.post_id})")

            except Exception as e:
                # Mark the individual post failed but keep processing the rest.
                logger.error(f"Error publishing thread post {thread_post.id}: {e}")
                thread_post.status = "failed"
                thread_post.error_message = str(e)

        # Persist any "failed" status changes made above.
        db.commit()

        logger.info(f"Queued {published} thread posts for publishing")

        return {"success": True, "published": published}

    except Exception as e:
        logger.error(f"Error in check_thread_schedules: {e}")
        return {"success": False, "error": str(e)}

    finally:
        db.close()
|
||||
|
||||
|
||||
@shared_task
def update_thread_post_status(post_id: int, platform_post_id: str = None):
    """
    Update thread post status after publishing.

    Mirrors the outcome of a published/failed Post onto its ThreadPost row,
    recounts the parent series' published posts, and marks the series
    completed once every post has gone out.

    Args:
        post_id: The Post ID that was published
        platform_post_id: The platform-specific post ID (for reply chains)

    Returns:
        dict: {"success": True[, "message": ...]} or
              {"success": False, "error": <message>} on failure.
    """
    db = SessionLocal()

    try:
        from app.models.thread_series import ThreadSeries, ThreadPost
        from app.models.post import Post

        # Find the thread post associated with this post
        thread_post = db.query(ThreadPost).filter(
            ThreadPost.post_id == post_id
        ).first()

        if not thread_post:
            # Regular (non-thread) posts are a no-op for this task.
            return {"success": True, "message": "Not a thread post"}

        post = db.query(Post).filter(Post.id == post_id).first()

        if post and post.status == "published":
            thread_post.status = "published"
            thread_post.published_at = datetime.utcnow()

            if platform_post_id:
                thread_post.platform_post_id = platform_post_id

            # Update series progress
            series = db.query(ThreadSeries).filter(
                ThreadSeries.id == thread_post.series_id
            ).first()

            if series:
                # Recount from the DB instead of incrementing in place.
                # NOTE(review): this relies on session autoflush so the count
                # query sees this thread_post's new "published" status —
                # confirm autoflush is enabled on SessionLocal.
                series.posts_published = db.query(ThreadPost).filter(
                    ThreadPost.series_id == series.id,
                    ThreadPost.status == "published"
                ).count()

                # Check if series is complete
                if series.posts_published >= series.total_posts:
                    series.status = "completed"
                    series.completed_at = datetime.utcnow()

                # Store first post ID for reply chain (later posts reply to
                # the first post on the platform).
                if thread_post.sequence_number == 1 and platform_post_id:
                    series.first_platform_post_id = platform_post_id

        elif post and post.status == "failed":
            # Propagate the failure details from the Post onto the thread row.
            thread_post.status = "failed"
            thread_post.error_message = post.error_message

        db.commit()

        return {"success": True}

    except Exception as e:
        logger.error(f"Error updating thread post status: {e}")
        return {"success": False, "error": str(e)}

    finally:
        db.close()
|
||||
|
||||
|
||||
# ============================================================
|
||||
# A/B TESTING & RECYCLING TASKS
|
||||
# ============================================================
|
||||
|
||||
@shared_task
def evaluate_ab_tests():
    """
    Evaluate running A/B tests and update metrics.

    Runs every hour via Celery Beat. Tests whose configured duration has
    elapsed are evaluated and finalized; tests still inside their window
    only get a metrics refresh.
    """
    logger.info("Evaluating A/B tests...")

    db = SessionLocal()

    try:
        from app.models.ab_test import ABTest
        from app.services.ab_testing_service import ab_testing_service

        # Every test currently in the "running" state.
        active_tests = db.query(ABTest).filter(
            ABTest.status == "running"
        ).all()

        completed = 0

        for ab_test in active_tests:
            try:
                # A test that never started cannot be measured yet.
                if not ab_test.started_at:
                    continue

                hours_elapsed = (datetime.utcnow() - ab_test.started_at).total_seconds() / 3600

                if hours_elapsed < ab_test.duration_hours:
                    # Still inside its window: just refresh variant metrics.
                    run_async(ab_testing_service.update_variant_metrics(ab_test.id))
                else:
                    # Duration elapsed: evaluate and complete the test.
                    result = run_async(ab_testing_service.evaluate_test(ab_test.id))
                    logger.info(f"Evaluated A/B test {ab_test.id}: {result}")
                    completed += 1

            except Exception as e:
                # One bad test must not block evaluation of the others.
                logger.error(f"Error evaluating test {ab_test.id}: {e}")

        logger.info(f"Evaluated {completed} A/B tests")

        return {"success": True, "evaluated": completed}

    except Exception as e:
        logger.error(f"Error in evaluate_ab_tests: {e}")
        return {"success": False, "error": str(e)}

    finally:
        db.close()
|
||||
|
||||
|
||||
@shared_task
def auto_recycle_content():
    """
    Automatically recycle high-performing content.

    Runs daily at 2 AM via Celery Beat. Recycles at most one post per
    platform, restricted to posts with a high engagement rate.
    """
    logger.info("Auto-recycling content...")

    try:
        from app.services.recycling_service import recycling_service

        total_recycled = 0

        # One recycled post per platform, high-engagement only.
        for network in ("x", "threads"):
            outcome = run_async(recycling_service.auto_recycle(
                platform=network,
                count=1,
                min_engagement_rate=3.0  # Only recycle really good posts
            ))

            if outcome.get("success"):
                total_recycled += outcome.get("recycled", 0)
                logger.info(f"Auto-recycled {outcome.get('recycled', 0)} posts for {network}")

        return {"success": True, "recycled": total_recycled}

    except Exception as e:
        logger.error(f"Error in auto_recycle_content: {e}")
        return {"success": False, "error": str(e)}
|
||||
|
||||
|
||||
# ============================================================
|
||||
# ODOO SYNC TASKS
|
||||
# ============================================================
|
||||
|
||||
@shared_task
def sync_products_from_odoo():
    """
    Sync products from Odoo ERP.

    Runs daily at 6 AM via Celery Beat. No-ops (with an error result)
    when ODOO_SYNC_ENABLED is off.
    """
    logger.info("Syncing products from Odoo...")

    # Feature flag guard: skip entirely when Odoo sync is turned off.
    if not settings.ODOO_SYNC_ENABLED:
        logger.info("Odoo sync disabled, skipping")
        return {"success": False, "error": "Odoo sync disabled"}

    try:
        from app.services.odoo_service import odoo_service

        outcome = run_async(odoo_service.sync_products(limit=200))

        if not outcome.get("success"):
            logger.error(f"Odoo product sync failed: {outcome.get('error')}")
        else:
            logger.info(f"Synced {outcome.get('processed', 0)} products from Odoo")

        # Pass the service's result dict straight through to the caller.
        return outcome

    except Exception as e:
        logger.error(f"Error in sync_products_from_odoo: {e}")
        return {"success": False, "error": str(e)}
|
||||
|
||||
|
||||
@shared_task
def sync_services_from_odoo():
    """
    Sync services from Odoo ERP.

    Runs daily at 6 AM via Celery Beat. No-ops (with an error result)
    when ODOO_SYNC_ENABLED is off.
    """
    logger.info("Syncing services from Odoo...")

    # Feature flag guard: skip entirely when Odoo sync is turned off.
    if not settings.ODOO_SYNC_ENABLED:
        logger.info("Odoo sync disabled, skipping")
        return {"success": False, "error": "Odoo sync disabled"}

    try:
        from app.services.odoo_service import odoo_service

        outcome = run_async(odoo_service.sync_services(limit=100))

        if not outcome.get("success"):
            logger.error(f"Odoo service sync failed: {outcome.get('error')}")
        else:
            logger.info(f"Synced {outcome.get('processed', 0)} services from Odoo")

        # Pass the service's result dict straight through to the caller.
        return outcome

    except Exception as e:
        logger.error(f"Error in sync_services_from_odoo: {e}")
        return {"success": False, "error": str(e)}
|
||||
|
||||
|
||||
@shared_task
def export_leads_to_odoo():
    """
    Export unsynced leads to Odoo CRM.

    Runs every hour via Celery Beat. No-ops (with an error result)
    when ODOO_SYNC_ENABLED is off.
    """
    logger.info("Exporting leads to Odoo...")

    # Feature flag guard: skip entirely when Odoo sync is turned off.
    if not settings.ODOO_SYNC_ENABLED:
        logger.info("Odoo sync disabled, skipping")
        return {"success": False, "error": "Odoo sync disabled"}

    try:
        from app.services.odoo_service import odoo_service

        outcome = run_async(odoo_service.export_leads_to_odoo())

        if not outcome.get("success"):
            logger.error(f"Odoo lead export failed: {outcome.get('error')}")
        else:
            logger.info(f"Exported {outcome.get('created', 0)} leads to Odoo")

        # Pass the service's result dict straight through to the caller.
        return outcome

    except Exception as e:
        logger.error(f"Error in export_leads_to_odoo: {e}")
        return {"success": False, "error": str(e)}
|
||||
|
||||
|
||||
# ============================================================
|
||||
# ANALYTICS TASKS
|
||||
# ============================================================
|
||||
|
||||
@shared_task
def fetch_post_metrics():
    """
    Fetch and record metrics for recent posts.

    Runs every 15 minutes via Celery Beat. For every post published in the
    last 7 days, queries each platform's API for fresh metrics, records a
    snapshot via the analytics service, and stores the latest values on
    ``post.metrics``.

    Returns:
        dict: {"success": True, "updated": <post-platform pairs updated>} or
              {"success": False, "error": <message>} on failure.
    """
    logger.info("Fetching post metrics...")

    db = SessionLocal()

    try:
        from app.models.post import Post
        from app.services.analytics_service import analytics_service
        from app.publishers.manager import publisher_manager, Platform

        # Get posts published in the last 7 days
        recent_posts = db.query(Post).filter(
            Post.status == "published",
            Post.published_at >= datetime.utcnow() - timedelta(days=7)
        ).all()

        updated = 0

        for post in recent_posts:
            for platform_name in post.platforms:
                try:
                    platform = Platform(platform_name)
                    publisher = publisher_manager.get_publisher(platform)

                    if not publisher:
                        continue

                    # Get platform post ID; skip platforms we have no ID for.
                    platform_ids = post.platform_post_ids or {}
                    platform_post_id = platform_ids.get(platform_name)

                    if not platform_post_id:
                        continue

                    # Fetch metrics from platform API
                    metrics = run_async(publisher.get_post_metrics(platform_post_id))

                    if metrics:
                        # Record metrics snapshot for historical analytics.
                        run_async(analytics_service.record_post_metrics(
                            post_id=post.id,
                            platform=platform_name,
                            metrics=metrics
                        ))

                        # Update post.metrics with latest values.
                        # BUGFIX: the previous code mutated the JSON column
                        # dict in place via dict.update(); SQLAlchemy does not
                        # track in-place mutation of a plain JSON column
                        # (unless it is wrapped in MutableDict), so the commit
                        # could silently drop the change. Reassigning a new
                        # dict always marks the attribute as dirty.
                        current = post.metrics or {}
                        post.metrics = {
                            **current,
                            "likes": metrics.get("likes", current.get("likes", 0)),
                            "comments": metrics.get("comments", current.get("comments", 0)),
                            # Shares and retweets are summed (X reports
                            # retweets; other platforms report shares).
                            "shares": metrics.get("shares", 0) + metrics.get("retweets", 0),
                            "impressions": metrics.get("impressions", current.get("impressions", 0)),
                            "reach": metrics.get("reach", current.get("reach", 0))
                        }

                        updated += 1

                except Exception as e:
                    # One failing post/platform must not block the rest.
                    logger.error(f"Error fetching metrics for post {post.id} on {platform_name}: {e}")

        db.commit()
        logger.info(f"Updated metrics for {updated} post-platform combinations")

        return {"success": True, "updated": updated}

    except Exception as e:
        logger.error(f"Error in fetch_post_metrics: {e}")
        db.rollback()
        return {"success": False, "error": str(e)}

    finally:
        db.close()
|
||||
|
||||
|
||||
@shared_task
def generate_weekly_analytics_report():
    """
    Generate and optionally send weekly analytics report.

    Runs every Sunday at 9 AM via Celery Beat. The report summary is pushed
    to Telegram only when both bot token and chat ID are configured.
    """
    logger.info("Generating weekly analytics report...")

    try:
        from app.services.analytics_service import analytics_service
        from app.services.notifications import notification_service

        # Build the report for the past week.
        report = run_async(analytics_service.generate_weekly_report())
        logger.info(f"Generated report {report.id} for {report.period_start} - {report.period_end}")

        # Deliver the text summary via Telegram when fully configured.
        telegram_configured = settings.TELEGRAM_BOT_TOKEN and settings.TELEGRAM_CHAT_ID
        if telegram_configured and report.summary_text:
            delivered = run_async(notification_service.notify_daily_summary({
                "custom_message": report.summary_text
            }))

            if delivered:
                logger.info("Weekly report sent to Telegram")
            else:
                logger.warning("Failed to send report to Telegram")

        return {
            "success": True,
            "report_id": report.id,
            "total_posts": report.total_posts,
            "total_engagements": report.total_engagements
        }

    except Exception as e:
        logger.error(f"Error generating weekly report: {e}")
        return {"success": False, "error": str(e)}
|
||||
|
||||
|
||||
@shared_task
def recalculate_optimal_times():
    """
    Recalculate optimal posting times based on historical data.

    Runs weekly via Celery Beat. Computes per-platform time slots plus a
    cross-platform pass (platform=None, reported under the key "all").
    """
    logger.info("Recalculating optimal posting times...")

    try:
        from app.services.analytics_service import analytics_service

        results = {}

        # Calculate for each platform; None means "all platforms combined".
        for platform in ("x", "threads", "instagram", "facebook", None):
            slots = run_async(analytics_service.get_optimal_times(
                platform=platform,
                days=90
            ))

            platform_key = platform or "all"
            results[platform_key] = len(slots)

            logger.info(f"Calculated {len(slots)} optimal time slots for {platform_key}")

        return {"success": True, "results": results}

    except Exception as e:
        logger.error(f"Error recalculating optimal times: {e}")
        return {"success": False, "error": str(e)}
|
||||
|
||||
|
||||
# ============================================================
|
||||
# MAINTENANCE TASKS
|
||||
# ============================================================
|
||||
|
||||
Reference in New Issue
Block a user