feat: Add Analytics, Odoo Integration, A/B Testing, and Content features
Phase 1 - Analytics y Reportes: - PostMetrics and AnalyticsReport models for tracking engagement - Analytics service with dashboard stats, top posts, optimal times - 8 API endpoints at /api/analytics/* - Interactive dashboard with Chart.js charts - Celery tasks for metrics fetch (15min) and weekly reports Phase 2 - Integración Odoo: - Lead and OdooSyncLog models for CRM integration - Odoo fields added to Product and Service models - XML-RPC service for bidirectional sync - Lead management API at /api/leads/* - Leads dashboard template - Celery tasks for product/service sync and lead export Phase 3 - A/B Testing y Recycling: - ABTest, ABTestVariant, RecycledPost models - Statistical winner analysis using chi-square test - Content recycling with engagement-based scoring - APIs at /api/ab-tests/* and /api/recycling/* - Automated test evaluation and content recycling tasks Phase 4 - Thread Series y Templates: - ThreadSeries and ThreadPost models for multi-post threads - AI-powered thread generation - Enhanced ImageTemplate with HTML template support - APIs at /api/threads/* and /api/templates/* - Thread scheduling with reply chain support Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
257
app/api/routes/analytics.py
Normal file
257
app/api/routes/analytics.py
Normal file
@@ -0,0 +1,257 @@
|
||||
"""
|
||||
API Routes for Analytics.
|
||||
"""
|
||||
|
||||
from datetime import date, datetime
|
||||
from typing import Optional
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from sqlalchemy.orm import Session
|
||||
from pydantic import BaseModel
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.services.analytics_service import analytics_service
|
||||
from app.services.notifications import notification_service
|
||||
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
class DashboardResponse(BaseModel):
    """Schema describing the analytics dashboard payload.

    NOTE(review): declared but not referenced by any route in this file —
    presumably intended as the ``response_model`` for ``GET /dashboard``;
    confirm before removing or wiring it up.
    """

    period_days: int            # length of the analyzed window, in days
    total_posts: int
    total_impressions: int
    total_engagements: int      # combined likes + comments + shares
    total_likes: int
    total_comments: int
    total_shares: int
    avg_engagement_rate: float
    platform_breakdown: dict    # per-platform aggregates
    content_breakdown: dict     # per-content-type aggregates
    pending_interactions: int
|
||||
|
||||
|
||||
@router.get("/dashboard")
async def get_analytics_dashboard(
    days: int = Query(30, ge=1, le=365),
    platform: Optional[str] = None,
    db: Session = Depends(get_db)
):
    """
    Get analytics dashboard data.

    - **days**: Number of days to analyze (default 30)
    - **platform**: Filter by platform (optional)
    """
    # All aggregation is delegated to the analytics service; return its
    # payload directly as the response body.
    return await analytics_service.get_dashboard_stats(days=days, platform=platform)
|
||||
|
||||
|
||||
@router.get("/top-posts")
async def get_top_posts(
    days: int = Query(30, ge=1, le=365),
    limit: int = Query(10, ge=1, le=50),
    platform: Optional[str] = None,
    db: Session = Depends(get_db)
):
    """
    Get top performing posts by engagement rate.

    - **days**: Number of days to analyze
    - **limit**: Maximum number of posts to return
    - **platform**: Filter by platform (optional)
    """
    # Ranking is computed by the analytics service; we only shape the payload.
    ranked = await analytics_service.get_top_posts(days=days, limit=limit, platform=platform)
    return {"posts": ranked, "count": len(ranked)}
|
||||
|
||||
|
||||
@router.get("/optimal-times")
async def get_optimal_times(
    platform: Optional[str] = None,
    days: int = Query(90, ge=30, le=365),
    db: Session = Depends(get_db)
):
    """
    Get optimal posting times based on historical performance.

    - **platform**: Filter by platform (optional)
    - **days**: Days of historical data to analyze
    """
    slots = await analytics_service.get_optimal_times(platform=platform, days=days)
    # Cap the response at the 20 best-scoring time slots.
    return {
        "optimal_times": slots[:20],
        "analysis_period_days": days,
        "platform": platform
    }
|
||||
|
||||
|
||||
@router.get("/reports")
async def get_reports(
    report_type: str = Query("weekly", regex="^(daily|weekly|monthly)$"),
    limit: int = Query(10, ge=1, le=52),
    db: Session = Depends(get_db)
):
    """
    Get historical analytics reports.

    - **report_type**: Type of report (daily, weekly, monthly)
    - **limit**: Maximum number of reports to return
    """
    history = await analytics_service.get_reports(report_type=report_type, limit=limit)
    return {"reports": history, "count": len(history)}
|
||||
|
||||
|
||||
@router.post("/reports/generate")
async def generate_report(
    report_type: str = Query("weekly", regex="^(daily|weekly|monthly)$"),
    db: Session = Depends(get_db)
):
    """
    Generate a new analytics report.

    - **report_type**: Type of report to generate
    """
    # Guard clause: only the weekly report is implemented so far.
    if report_type != "weekly":
        raise HTTPException(
            status_code=400,
            detail=f"Report type '{report_type}' not implemented yet"
        )

    report = await analytics_service.generate_weekly_report()
    return {
        "message": "Reporte generado exitosamente",
        "report": report.to_dict()
    }
|
||||
|
||||
|
||||
@router.post("/reports/send-telegram")
async def send_report_telegram(
    db: Session = Depends(get_db)
):
    """
    Generate and send weekly report via Telegram.
    """
    try:
        report = await analytics_service.generate_weekly_report()

        # Nothing to deliver when the report has no text summary.
        if not report.summary_text:
            return {
                "message": "Reporte generado sin resumen",
                "report_id": report.id
            }

        delivered = await notification_service.notify_daily_summary({
            "custom_message": report.summary_text
        })
        if delivered:
            return {
                "message": "Reporte enviado a Telegram",
                "report_id": report.id
            }
        # Report exists but Telegram delivery failed — surface that to caller.
        return {
            "message": "Reporte generado pero no se pudo enviar a Telegram",
            "report_id": report.id
        }

    except Exception as e:
        # Map any failure in generation/delivery to a 500 for the API client.
        raise HTTPException(
            status_code=500,
            detail=f"Error generando reporte: {str(e)}"
        )
|
||||
|
||||
|
||||
@router.get("/posts/{post_id}/metrics")
async def get_post_metrics(
    post_id: int,
    db: Session = Depends(get_db)
):
    """
    Get detailed metrics for a specific post.
    """
    # Local imports keep the model dependencies scoped to this endpoint.
    from app.models.post import Post
    from app.models.post_metrics import PostMetrics

    post = db.query(Post).filter(Post.id == post_id).first()
    if post is None:
        raise HTTPException(status_code=404, detail="Post not found")

    # Most recent 100 metric snapshots, newest first.
    history = (
        db.query(PostMetrics)
        .filter(PostMetrics.post_id == post_id)
        .order_by(PostMetrics.recorded_at.desc())
        .limit(100)
        .all()
    )

    return {
        "post_id": post_id,
        "current_metrics": post.metrics,
        "published_at": post.published_at.isoformat() if post.published_at else None,
        "platforms": post.platforms,
        "metrics_history": [snapshot.to_dict() for snapshot in history]
    }
|
||||
|
||||
|
||||
@router.get("/engagement-trend")
async def get_engagement_trend(
    days: int = Query(30, ge=7, le=365),
    platform: Optional[str] = None,
    db: Session = Depends(get_db)
):
    """
    Get engagement trend over time for charting.
    """
    from app.models.post import Post
    from datetime import timedelta

    cutoff = datetime.utcnow() - timedelta(days=days)

    query = db.query(Post).filter(
        Post.published_at >= cutoff,
        Post.status == "published"
    )
    if platform:
        query = query.filter(Post.platforms.contains([platform]))

    # Aggregate per calendar day (YYYY-MM-DD key).
    daily = {}
    for post in query.order_by(Post.published_at).all():
        if not post.published_at:
            continue
        key = post.published_at.strftime("%Y-%m-%d")
        bucket = daily.setdefault(key, {
            "date": key,
            "posts": 0,
            "impressions": 0,
            "engagements": 0
        })
        bucket["posts"] += 1
        metrics = post.metrics
        if metrics:
            bucket["impressions"] += metrics.get("impressions", 0)
            # Engagements = likes + comments + shares.
            bucket["engagements"] += (
                metrics.get("likes", 0)
                + metrics.get("comments", 0)
                + metrics.get("shares", 0)
            )

    return {
        "trend": sorted(daily.values(), key=lambda row: row["date"]),
        "period_days": days,
        "platform": platform
    }
|
||||
Reference in New Issue
Block a user