feat: Add Analytics, Odoo Integration, A/B Testing, and Content features
Phase 1 - Analytics y Reportes: - PostMetrics and AnalyticsReport models for tracking engagement - Analytics service with dashboard stats, top posts, optimal times - 8 API endpoints at /api/analytics/* - Interactive dashboard with Chart.js charts - Celery tasks for metrics fetch (15min) and weekly reports Phase 2 - Integración Odoo: - Lead and OdooSyncLog models for CRM integration - Odoo fields added to Product and Service models - XML-RPC service for bidirectional sync - Lead management API at /api/leads/* - Leads dashboard template - Celery tasks for product/service sync and lead export Phase 3 - A/B Testing y Recycling: - ABTest, ABTestVariant, RecycledPost models - Statistical winner analysis using chi-square test - Content recycling with engagement-based scoring - APIs at /api/ab-tests/* and /api/recycling/* - Automated test evaluation and content recycling tasks Phase 4 - Thread Series y Templates: - ThreadSeries and ThreadPost models for multi-post threads - AI-powered thread generation - Enhanced ImageTemplate with HTML template support - APIs at /api/threads/* and /api/templates/* - Thread scheduling with reply chain support Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
396
app/services/ab_testing_service.py
Normal file
396
app/services/ab_testing_service.py
Normal file
@@ -0,0 +1,396 @@
|
||||
"""
|
||||
A/B Testing Service - Create and manage content experiments.
|
||||
"""
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Dict, Optional
|
||||
import logging
|
||||
from scipy import stats
|
||||
|
||||
from app.core.database import SessionLocal
|
||||
from app.models.ab_test import ABTest, ABTestVariant
|
||||
from app.models.post import Post
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ABTestingService:
|
||||
"""Service for A/B testing content variants."""
|
||||
|
||||
def _get_db(self):
    """Open and return a fresh SQLAlchemy session; the caller must close it."""
    return SessionLocal()
|
||||
|
||||
async def create_test(
    self,
    name: str,
    platform: str,
    variants: List[Dict],
    test_type: str = "content",
    duration_hours: int = 24,
    min_sample_size: int = 100,
    success_metric: str = "engagement_rate",
    description: Optional[str] = None
) -> ABTest:
    """
    Create a new A/B test in "draft" status together with its variants.

    Fix vs. original: ``description`` defaulted to ``None`` but was annotated
    as plain ``str``; the annotation is now ``Optional[str]`` (backward
    compatible — callers are unaffected).

    Args:
        name: Test name.
        platform: Target platform.
        variants: List of variant data, e.g.
            [{"name": "A", "content": "...", "hashtags": [...]}].
        test_type: Type of test (content, timing, hashtags, image).
        duration_hours: How long to run the test.
        min_sample_size: Minimum impressions per variant before evaluation.
        success_metric: Metric to optimize for.
        description: Optional free-text description.

    Returns:
        The persisted ABTest object (with its generated id).

    Raises:
        Exception: re-raised after rollback if the insert fails.
    """
    db = self._get_db()

    try:
        test = ABTest(
            name=name,
            description=description,
            test_type=test_type,
            platform=platform,
            status="draft",
            duration_hours=duration_hours,
            min_sample_size=min_sample_size,
            success_metric=success_metric
        )

        db.add(test)
        db.flush()  # assigns test.id so variants can reference it

        for variant_data in variants:
            db.add(ABTestVariant(
                test_id=test.id,
                name=variant_data.get("name", "A"),
                content=variant_data.get("content", ""),
                hashtags=variant_data.get("hashtags"),
                image_url=variant_data.get("image_url")
            ))

        db.commit()
        db.refresh(test)
        return test

    except Exception as e:
        logger.error(f"Error creating A/B test: {e}")
        db.rollback()
        raise

    finally:
        db.close()
|
||||
|
||||
async def start_test(self, test_id: int) -> Dict:
    """
    Activate a draft A/B test: schedule one post per variant and mark the
    test as running.

    Args:
        test_id: ID of the test to start.

    Returns:
        Dict with success flag, the test id, created post ids and variant
        count, or an error payload.
    """
    db = self._get_db()

    try:
        test = db.query(ABTest).filter(ABTest.id == test_id).first()
        if test is None:
            return {"success": False, "error": "Test not found"}

        if test.status != "draft":
            return {"success": False, "error": f"Test is already {test.status}"}

        variants = (
            db.query(ABTestVariant)
            .filter(ABTestVariant.test_id == test_id)
            .all()
        )
        if len(variants) < 2:
            return {"success": False, "error": "Test needs at least 2 variants"}

        launch_time = datetime.utcnow()
        created_post_ids = []

        for index, variant in enumerate(variants):
            # Stagger publication 5 minutes apart so variants get comparable exposure
            publish_at = launch_time + timedelta(minutes=5 * index)

            scheduled_post = Post(
                content=variant.content,
                content_type="ab_test",
                platforms=[test.platform],
                status="scheduled",
                scheduled_at=publish_at,
                hashtags=variant.hashtags,
                image_url=variant.image_url,
                ab_test_id=test.id
            )
            db.add(scheduled_post)
            db.flush()  # need the generated post id

            variant.post_id = scheduled_post.id
            variant.published_at = publish_at
            created_post_ids.append(scheduled_post.id)

        test.status = "running"
        test.started_at = launch_time
        db.commit()

        return {
            "success": True,
            "test_id": test.id,
            "post_ids": created_post_ids,
            "variants_count": len(variants)
        }

    except Exception as e:
        logger.error(f"Error starting A/B test: {e}")
        db.rollback()
        return {"success": False, "error": str(e)}

    finally:
        db.close()
|
||||
|
||||
async def update_variant_metrics(self, test_id: int) -> Dict:
    """
    Refresh each variant's cached engagement numbers from its linked post.

    Args:
        test_id: ID of the test whose variants should be refreshed.

    Returns:
        {"success": True, "variants_updated": N} or an error payload.
    """
    db = self._get_db()

    try:
        test = db.query(ABTest).filter(ABTest.id == test_id).first()
        if test is None:
            return {"success": False, "error": "Test not found"}

        variants = (
            db.query(ABTestVariant)
            .filter(ABTestVariant.test_id == test_id)
            .all()
        )

        for variant in variants:
            if not variant.post_id:
                continue  # variant has no post yet (test not started)
            source_post = db.query(Post).filter(Post.id == variant.post_id).first()
            if source_post is None or not source_post.metrics:
                continue
            metrics = source_post.metrics
            variant.likes = metrics.get("likes", 0)
            variant.comments = metrics.get("comments", 0)
            # Twitter reports "retweets" rather than "shares"; fold both in
            variant.shares = metrics.get("shares", 0) + metrics.get("retweets", 0)
            variant.impressions = metrics.get("impressions", 0)
            variant.reach = metrics.get("reach", 0)
            variant.calculate_engagement_rate()

        db.commit()

        return {
            "success": True,
            "variants_updated": len(variants)
        }

    except Exception as e:
        logger.error(f"Error updating variant metrics: {e}")
        db.rollback()
        return {"success": False, "error": str(e)}

    finally:
        db.close()
|
||||
|
||||
async def evaluate_test(self, test_id: int) -> Dict:
    """
    Evaluate A/B test results: refresh metrics, pick a winner by the
    configured success metric, and run a chi-square significance test on
    the top two variants.

    Fixes vs. original:
    - clears stale ``is_winner`` flags before marking the new winner, so
      repeated evaluations cannot leave multiple variants flagged;
    - treats NULL impressions as 0 instead of raising inside ``min()``.

    Args:
        test_id: ID of the test to evaluate.

    Returns:
        Dict with winner/runner-up stats, confidence level, p-value and the
        test status, or an error payload.
    """
    db = self._get_db()

    try:
        test = db.query(ABTest).filter(ABTest.id == test_id).first()
        if not test:
            return {"success": False, "error": "Test not found"}

        # Pull fresh numbers from the linked posts first
        await self.update_variant_metrics(test_id)

        variants = db.query(ABTestVariant).filter(
            ABTestVariant.test_id == test_id
        ).all()

        if len(variants) < 2:
            return {"success": False, "error": "Not enough variants"}

        # Robustness: impressions may be NULL before the first metrics fetch
        min_impressions = min((v.impressions or 0) for v in variants)
        if min_impressions < test.min_sample_size:
            return {
                "success": True,
                "status": "insufficient_data",
                "min_impressions": min_impressions,
                "required": test.min_sample_size
            }

        # Rank by the configured success metric; unknown metrics fall back
        # to engagement rate (same behavior as the original if/elif chain)
        metric_attr = (
            test.success_metric
            if test.success_metric in ("engagement_rate", "likes", "comments")
            else "engagement_rate"
        )
        sorted_variants = sorted(
            variants, key=lambda v: getattr(v, metric_attr), reverse=True
        )

        winner = sorted_variants[0]
        runner_up = sorted_variants[1]

        # Chi-square test on engaged-vs-not-engaged counts for the top two
        try:
            winner_engagements = winner.likes + winner.comments + winner.shares
            runner_up_engagements = runner_up.likes + runner_up.comments + runner_up.shares

            contingency = [
                [winner_engagements, winner.impressions - winner_engagements],
                [runner_up_engagements, runner_up.impressions - runner_up_engagements]
            ]

            chi2, p_value, dof, expected = stats.chi2_contingency(contingency)
            confidence = (1 - p_value) * 100

        except Exception:
            # Degenerate contingency table (e.g. an all-zero column) —
            # fall back to the raw metric comparison with no significance
            confidence = None
            p_value = None

        # Bug fix: reset any previous winner flags so re-evaluation cannot
        # leave two variants marked as winner
        for v in variants:
            v.is_winner = False
        winner.is_winner = True
        test.winning_variant_id = winner.id
        test.confidence_level = confidence

        # Conclude the test once its configured duration has elapsed
        if test.started_at:
            elapsed = datetime.utcnow() - test.started_at
            if elapsed.total_seconds() >= test.duration_hours * 3600:
                test.status = "completed"
                test.ended_at = datetime.utcnow()

        db.commit()

        return {
            "success": True,
            "winner": {
                "variant_id": winner.id,
                "name": winner.name,
                "engagement_rate": winner.engagement_rate,
                "impressions": winner.impressions
            },
            "runner_up": {
                "variant_id": runner_up.id,
                "name": runner_up.name,
                "engagement_rate": runner_up.engagement_rate,
                "impressions": runner_up.impressions
            },
            "confidence_level": confidence,
            "p_value": p_value,
            "test_status": test.status
        }

    except Exception as e:
        logger.error(f"Error evaluating A/B test: {e}")
        db.rollback()
        return {"success": False, "error": str(e)}

    finally:
        db.close()
|
||||
|
||||
async def get_test(self, test_id: int) -> Optional[Dict]:
    """Fetch a single test (with its variants) as a dict, or None if absent."""
    db = self._get_db()
    try:
        row = db.query(ABTest).filter(ABTest.id == test_id).first()
        return row.to_dict() if row is not None else None
    finally:
        db.close()
|
||||
|
||||
async def get_tests(
    self,
    status: Optional[str] = None,
    platform: Optional[str] = None,
    limit: int = 20
) -> List[Dict]:
    """
    List tests, newest first, with optional status/platform filters.

    Fix vs. original: ``status`` and ``platform`` defaulted to ``None`` but
    were annotated as plain ``str``; annotations are now ``Optional[str]``
    (backward compatible).

    Args:
        status: Only return tests with this status, if given.
        platform: Only return tests for this platform, if given.
        limit: Maximum number of tests to return.

    Returns:
        List of test dicts ordered by creation date, newest first.
    """
    db = self._get_db()

    try:
        query = db.query(ABTest)

        if status:
            query = query.filter(ABTest.status == status)
        if platform:
            query = query.filter(ABTest.platform == platform)

        tests = query.order_by(ABTest.created_at.desc()).limit(limit).all()
        return [t.to_dict() for t in tests]

    finally:
        db.close()
|
||||
|
||||
async def cancel_test(self, test_id: int) -> Dict:
    """
    Cancel a test and un-schedule any variant posts that have not gone out.

    Args:
        test_id: ID of the test to cancel.

    Returns:
        {"success": True, "message": ...} or an error payload.
    """
    db = self._get_db()

    try:
        test = db.query(ABTest).filter(ABTest.id == test_id).first()
        if test is None:
            return {"success": False, "error": "Test not found"}

        test.status = "cancelled"
        test.ended_at = datetime.utcnow()

        # Cancel variant posts that are still waiting to be published
        variants = (
            db.query(ABTestVariant)
            .filter(ABTestVariant.test_id == test_id)
            .all()
        )
        for variant in variants:
            if not variant.post_id:
                continue
            pending_post = db.query(Post).filter(Post.id == variant.post_id).first()
            if pending_post is not None and pending_post.status == "scheduled":
                pending_post.status = "cancelled"

        db.commit()
        return {"success": True, "message": "Test cancelled"}

    except Exception as e:
        logger.error(f"Error cancelling A/B test: {e}")
        db.rollback()
        return {"success": False, "error": str(e)}

    finally:
        db.close()
|
||||
|
||||
|
||||
# Module-level singleton shared by API routes and Celery tasks.
ab_testing_service = ABTestingService()
|
||||
423
app/services/analytics_service.py
Normal file
423
app/services/analytics_service.py
Normal file
@@ -0,0 +1,423 @@
|
||||
"""
|
||||
Analytics Service - Track and analyze post performance.
|
||||
"""
|
||||
|
||||
from datetime import datetime, date, timedelta
|
||||
from typing import List, Dict, Optional
|
||||
from sqlalchemy import func, desc
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.core.database import SessionLocal
|
||||
from app.models.post import Post
|
||||
from app.models.post_metrics import PostMetrics
|
||||
from app.models.analytics_report import AnalyticsReport
|
||||
from app.models.interaction import Interaction
|
||||
|
||||
|
||||
class AnalyticsService:
|
||||
"""Service for analytics and reporting."""
|
||||
|
||||
def __init__(self):
    """Stateless service; database sessions are opened per call via _get_db()."""
|
||||
|
||||
def _get_db(self) -> Session:
    """Open and return a fresh SQLAlchemy session; the caller must close it."""
    return SessionLocal()
|
||||
|
||||
async def get_dashboard_stats(
    self,
    days: int = 30,
    platform: Optional[str] = None
) -> Dict:
    """
    Aggregate engagement statistics for published posts over the last
    *days* days, broken down by platform and content type.

    Fix vs. original: ``post.platforms`` may be NULL on some rows, which
    made the breakdown loop raise ``TypeError``; it is now guarded with
    ``or []``.

    Args:
        days: Size of the reporting window in days.
        platform: Optional platform filter.

    Returns:
        Dict with totals, average engagement rate, per-platform and
        per-content-type breakdowns, and the pending interaction count.
    """
    db = self._get_db()
    try:
        start_date = datetime.utcnow() - timedelta(days=days)

        posts_query = db.query(Post).filter(
            Post.published_at >= start_date,
            Post.status == "published"
        )
        if platform:
            posts_query = posts_query.filter(Post.platforms.contains([platform]))

        posts = posts_query.all()

        # Aggregate raw counters across all posts in the window
        total_impressions = 0
        total_likes = 0
        total_comments = 0
        total_shares = 0

        for post in posts:
            if post.metrics:
                total_likes += post.metrics.get("likes", 0)
                total_comments += post.metrics.get("comments", 0)
                # Twitter reports "retweets" rather than "shares"; count both
                total_shares += post.metrics.get("shares", 0) + post.metrics.get("retweets", 0)
                total_impressions += post.metrics.get("impressions", 0)

        total_engagements = total_likes + total_comments + total_shares

        avg_engagement_rate = 0.0
        if total_impressions > 0:
            avg_engagement_rate = (total_engagements / total_impressions) * 100

        # Per-platform breakdown (likes + comments + shares)
        platform_breakdown = {}
        for post in posts:
            # Robustness fix: platforms may be NULL on legacy rows
            for p in (post.platforms or []):
                if p not in platform_breakdown:
                    platform_breakdown[p] = {"posts": 0, "engagements": 0}
                platform_breakdown[p]["posts"] += 1
                if post.metrics:
                    platform_breakdown[p]["engagements"] += (
                        post.metrics.get("likes", 0) +
                        post.metrics.get("comments", 0) +
                        post.metrics.get("shares", 0)
                    )

        # Per-content-type breakdown
        # NOTE(review): this breakdown counts only likes + comments (no
        # shares), unlike the platform breakdown — kept for behavior
        # parity; confirm whether that asymmetry is intended.
        content_breakdown = {}
        for post in posts:
            ct = post.content_type
            if ct not in content_breakdown:
                content_breakdown[ct] = {"posts": 0, "engagements": 0}
            content_breakdown[ct]["posts"] += 1
            if post.metrics:
                content_breakdown[ct]["engagements"] += (
                    post.metrics.get("likes", 0) +
                    post.metrics.get("comments", 0)
                )

        # Interactions awaiting a reply (SQLAlchemy needs `== False`, not `not`)
        pending_interactions = db.query(Interaction).filter(
            Interaction.responded == False,
            Interaction.is_archived == False
        ).count()

        return {
            "period_days": days,
            "total_posts": len(posts),
            "total_impressions": total_impressions,
            "total_engagements": total_engagements,
            "total_likes": total_likes,
            "total_comments": total_comments,
            "total_shares": total_shares,
            "avg_engagement_rate": round(avg_engagement_rate, 2),
            "platform_breakdown": platform_breakdown,
            "content_breakdown": content_breakdown,
            "pending_interactions": pending_interactions
        }

    finally:
        db.close()
|
||||
|
||||
async def get_top_posts(
    self,
    days: int = 30,
    limit: int = 10,
    platform: Optional[str] = None
) -> List[Dict]:
    """
    Rank published posts from the last *days* days by engagement rate.

    Args:
        days: Size of the reporting window in days.
        limit: Maximum number of posts to return.
        platform: Optional platform filter.

    Returns:
        List of post summary dicts sorted by engagement rate, descending.
    """
    db = self._get_db()
    try:
        window_start = datetime.utcnow() - timedelta(days=days)

        query = db.query(Post).filter(
            Post.published_at >= window_start,
            Post.status == "published",
            Post.metrics.isnot(None)
        )
        if platform:
            query = query.filter(Post.platforms.contains([platform]))

        ranked = []
        for post in query.all():
            if not post.metrics:
                continue

            engagement = (
                post.metrics.get("likes", 0)
                + post.metrics.get("comments", 0)
                + post.metrics.get("shares", 0)
                + post.metrics.get("retweets", 0)
            )
            # Default of 1 avoids a zero denominator when impressions are missing
            impressions = post.metrics.get("impressions", 1)
            rate = (engagement / impressions * 100) if impressions > 0 else 0

            snippet = post.content if len(post.content) <= 100 else post.content[:100] + "..."
            ranked.append({
                "id": post.id,
                "content": snippet,
                "content_type": post.content_type,
                "platforms": post.platforms,
                "published_at": post.published_at.isoformat() if post.published_at else None,
                "likes": post.metrics.get("likes", 0),
                "comments": post.metrics.get("comments", 0),
                "shares": post.metrics.get("shares", 0) + post.metrics.get("retweets", 0),
                "impressions": impressions,
                "engagement_rate": round(rate, 2)
            })

        ranked.sort(key=lambda item: item["engagement_rate"], reverse=True)
        return ranked[:limit]

    finally:
        db.close()
|
||||
|
||||
async def get_optimal_times(
    self,
    platform: Optional[str] = None,
    days: int = 90
) -> List[Dict]:
    """
    Rank (weekday, hour) slots by average engagement rate over historical
    published posts.

    Args:
        platform: Optional platform filter.
        days: Size of the historical window in days.

    Returns:
        List of slot dicts sorted by average engagement rate, descending.
    """
    db = self._get_db()
    try:
        window_start = datetime.utcnow() - timedelta(days=days)

        query = db.query(Post).filter(
            Post.published_at >= window_start,
            Post.status == "published",
            Post.metrics.isnot(None)
        )
        if platform:
            query = query.filter(Post.platforms.contains([platform]))

        # Bucket engagement rates by (weekday, hour) slot
        slot_rates = {}
        for post in query.all():
            if not (post.published_at and post.metrics):
                continue

            engagement = (
                post.metrics.get("likes", 0)
                + post.metrics.get("comments", 0)
                + post.metrics.get("shares", 0)
            )
            impressions = post.metrics.get("impressions", 1)
            rate = (engagement / impressions * 100) if impressions > 0 else 0

            slot = (post.published_at.weekday(), post.published_at.hour)
            slot_rates.setdefault(slot, []).append(rate)

        day_labels = ["Lun", "Mar", "Mié", "Jue", "Vie", "Sáb", "Dom"]
        ranking = []
        for (day, hour), rates in slot_rates.items():
            mean_rate = sum(rates) / len(rates) if rates else 0
            ranking.append({
                "day": day,
                "day_name": day_labels[day],
                "hour": hour,
                "hour_formatted": f"{hour:02d}:00",
                "avg_engagement_rate": round(mean_rate, 2),
                "sample_size": len(rates)
            })

        ranking.sort(key=lambda slot: slot["avg_engagement_rate"], reverse=True)
        return ranking

    finally:
        db.close()
|
||||
|
||||
async def generate_weekly_report(
    self,
    week_start: Optional[date] = None
) -> AnalyticsReport:
    """
    Build and persist a weekly AnalyticsReport, including week-over-week
    change percentages, top posts and best posting times.

    Fix vs. original: ``post.platforms`` may be NULL on some rows, which
    made the per-platform loop raise ``TypeError``; it is now guarded with
    ``or []`` (consistent with the dashboard aggregation).

    Args:
        week_start: Monday of the week to report on. Defaults to the most
            recent complete Monday-to-Sunday week.

    Returns:
        The persisted AnalyticsReport instance.
    """
    db = self._get_db()
    try:
        if week_start is None:
            # Default to the last complete Mon-Sun week
            today = date.today()
            week_start = today - timedelta(days=today.weekday() + 7)

        week_end = week_start + timedelta(days=6)
        start_dt = datetime.combine(week_start, datetime.min.time())
        end_dt = datetime.combine(week_end, datetime.max.time())

        # Previous week window for period-over-period comparison
        prev_start = week_start - timedelta(days=7)
        prev_end = week_end - timedelta(days=7)
        prev_start_dt = datetime.combine(prev_start, datetime.min.time())
        prev_end_dt = datetime.combine(prev_end, datetime.max.time())

        posts = db.query(Post).filter(
            Post.published_at >= start_dt,
            Post.published_at <= end_dt,
            Post.status == "published"
        ).all()

        prev_posts = db.query(Post).filter(
            Post.published_at >= prev_start_dt,
            Post.published_at <= prev_end_dt,
            Post.status == "published"
        ).all()

        # Aggregate current-week metrics
        total_impressions = 0
        total_likes = 0
        total_comments = 0
        total_shares = 0
        platform_breakdown = {}
        content_performance = {}

        for post in posts:
            if post.metrics:
                likes = post.metrics.get("likes", 0)
                comments = post.metrics.get("comments", 0)
                # Twitter reports "retweets" rather than "shares"; count both
                shares = post.metrics.get("shares", 0) + post.metrics.get("retweets", 0)
                impressions = post.metrics.get("impressions", 0)

                total_likes += likes
                total_comments += comments
                total_shares += shares
                total_impressions += impressions

                # Robustness fix: platforms may be NULL on legacy rows
                for p in (post.platforms or []):
                    if p not in platform_breakdown:
                        platform_breakdown[p] = {"posts": 0, "engagements": 0}
                    platform_breakdown[p]["posts"] += 1
                    platform_breakdown[p]["engagements"] += likes + comments + shares

                ct = post.content_type
                if ct not in content_performance:
                    content_performance[ct] = {"posts": 0, "engagements": 0, "impressions": 0}
                content_performance[ct]["posts"] += 1
                content_performance[ct]["engagements"] += likes + comments + shares
                content_performance[ct]["impressions"] += impressions

        total_engagements = total_likes + total_comments + total_shares

        # Previous-week totals for comparison
        prev_engagements = 0
        for post in prev_posts:
            if post.metrics:
                # NOTE(review): the previous week ignores "retweets" while the
                # current week counts them — kept for behavior parity, but the
                # comparison may understate last week's engagement; confirm.
                prev_engagements += (
                    post.metrics.get("likes", 0) +
                    post.metrics.get("comments", 0) +
                    post.metrics.get("shares", 0)
                )

        # Week-over-week percentage changes (0 when no baseline exists)
        posts_change = ((len(posts) - len(prev_posts)) / len(prev_posts) * 100) if prev_posts else 0
        engagement_change = ((total_engagements - prev_engagements) / prev_engagements * 100) if prev_engagements else 0

        top_posts = await self.get_top_posts(days=7, limit=5)
        best_times = await self.get_optimal_times(days=30)

        avg_engagement_rate = (total_engagements / total_impressions * 100) if total_impressions > 0 else 0
        avg_impressions = total_impressions / len(posts) if posts else 0
        avg_engagements = total_engagements / len(posts) if posts else 0

        report = AnalyticsReport(
            report_type="weekly",
            period_start=week_start,
            period_end=week_end,
            total_posts=len(posts),
            total_impressions=total_impressions,
            total_engagements=total_engagements,
            total_likes=total_likes,
            total_comments=total_comments,
            total_shares=total_shares,
            avg_engagement_rate=avg_engagement_rate,
            avg_impressions_per_post=avg_impressions,
            avg_engagements_per_post=avg_engagements,
            posts_change_pct=posts_change,
            engagement_change_pct=engagement_change,
            top_posts=top_posts[:5],
            best_times=best_times[:10],
            content_performance=content_performance,
            platform_breakdown=platform_breakdown
        )

        # Pre-render the Telegram summary text before persisting
        report.generate_telegram_summary()

        db.add(report)
        db.commit()
        db.refresh(report)

        return report

    finally:
        db.close()
|
||||
|
||||
async def get_reports(
    self,
    report_type: str = "weekly",
    limit: int = 10
) -> List[Dict]:
    """
    Return stored reports of the given type, newest period first.

    Args:
        report_type: Report type to filter by (e.g. "weekly").
        limit: Maximum number of reports to return.

    Returns:
        List of report dicts.
    """
    db = self._get_db()
    try:
        rows = (
            db.query(AnalyticsReport)
            .filter(AnalyticsReport.report_type == report_type)
            .order_by(desc(AnalyticsReport.period_start))
            .limit(limit)
            .all()
        )
        return [row.to_dict() for row in rows]
    finally:
        db.close()
|
||||
|
||||
async def record_post_metrics(
    self,
    post_id: int,
    platform: str,
    metrics: Dict
) -> PostMetrics:
    """
    Persist a point-in-time metrics snapshot for one post on one platform.

    Args:
        post_id: ID of the post the snapshot belongs to.
        platform: Platform the metrics were fetched from.
        metrics: Raw metrics payload; missing keys default to 0.

    Returns:
        The persisted PostMetrics snapshot.
    """
    db = self._get_db()
    try:
        snapshot = PostMetrics(
            post_id=post_id,
            platform=platform,
            likes=metrics.get("likes", 0),
            comments=metrics.get("comments", 0),
            shares=metrics.get("shares", 0),
            impressions=metrics.get("impressions", 0),
            reach=metrics.get("reach", 0),
            saves=metrics.get("saves", 0),
            clicks=metrics.get("clicks", 0),
            replies=metrics.get("replies", 0),
            quotes=metrics.get("quotes", 0)
        )
        # Derive the engagement rate from the raw counters before saving
        snapshot.calculate_engagement_rate()

        db.add(snapshot)
        db.commit()
        db.refresh(snapshot)
        return snapshot
    finally:
        db.close()
|
||||
|
||||
|
||||
# Module-level singleton shared by API routes and Celery tasks.
analytics_service = AnalyticsService()
|
||||
493
app/services/odoo_service.py
Normal file
493
app/services/odoo_service.py
Normal file
@@ -0,0 +1,493 @@
|
||||
"""
|
||||
Odoo Integration Service - Sync products, services, and leads with Odoo ERP.
|
||||
"""
|
||||
|
||||
import xmlrpc.client
|
||||
from datetime import datetime
|
||||
from typing import List, Dict, Optional, Any
|
||||
import logging
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.database import SessionLocal
|
||||
from app.models.product import Product
|
||||
from app.models.service import Service
|
||||
from app.models.lead import Lead
|
||||
from app.models.odoo_sync_log import OdooSyncLog
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class OdooService:
|
||||
"""Service for Odoo ERP integration."""
|
||||
|
||||
def __init__(self):
    """Read Odoo connection settings; the XML-RPC connection itself is lazy."""
    # Connection settings from application config
    self.url = settings.ODOO_URL
    self.db = settings.ODOO_DB
    self.username = settings.ODOO_USERNAME
    self.password = settings.ODOO_PASSWORD
    self.enabled = settings.ODOO_SYNC_ENABLED
    # XML-RPC state, populated on first successful _connect()
    self._uid = None
    self._common = None
    self._models = None
|
||||
|
||||
def _get_db(self):
    """Open and return a fresh SQLAlchemy session; the caller must close it."""
    return SessionLocal()
|
||||
|
||||
def _connect(self) -> bool:
    """
    Authenticate against Odoo's XML-RPC API.

    Populates ``self._common``, ``self._uid`` and ``self._models`` on
    success. Returns True on success, False when the integration is
    disabled, incompletely configured, or authentication fails.
    """
    config_complete = all([self.url, self.db, self.username, self.password])
    if not (self.enabled and config_complete):
        logger.warning("Odoo not configured or disabled")
        return False

    try:
        # "common" endpoint handles authentication
        self._common = xmlrpc.client.ServerProxy(f"{self.url}/xmlrpc/2/common")
        self._uid = self._common.authenticate(
            self.db, self.username, self.password, {}
        )
        if not self._uid:
            logger.error("Odoo authentication failed")
            return False

        # "object" endpoint handles model CRUD operations
        self._models = xmlrpc.client.ServerProxy(f"{self.url}/xmlrpc/2/object")
        return True

    except Exception as e:
        logger.error(f"Odoo connection error: {e}")
        return False
|
||||
|
||||
def _execute(self, model: str, method: str, *args, **kwargs) -> Any:
    """
    Run *method* on an Odoo *model* via execute_kw, connecting on demand.

    Raises:
        ConnectionError: if no session exists and connecting fails.
    """
    if not (self._models and self._uid):
        if not self._connect():
            raise ConnectionError("Cannot connect to Odoo")

    return self._models.execute_kw(
        self.db, self._uid, self.password,
        model, method, list(args), kwargs
    )
|
||||
|
||||
async def test_connection(self) -> Dict:
    """
    Probe Odoo connectivity and report the server version when reachable.

    Returns:
        Dict with a "connected" flag plus either version/uid details or an
        error description.
    """
    if not self.enabled:
        return {
            "connected": False,
            "error": "Odoo integration not enabled",
            "configured": bool(self.url)
        }

    try:
        if not self._connect():
            return {
                "connected": False,
                "error": "Authentication failed"
            }
        server_info = self._common.version()
        return {
            "connected": True,
            "version": server_info.get("server_version"),
            "uid": self._uid
        }
    except Exception as e:
        return {
            "connected": False,
            "error": str(e)
        }
|
||||
|
||||
async def sync_products(self, limit: int = 100) -> Dict:
    """
    Import sellable Odoo products into the local Product table.

    Creates or updates local rows keyed by ``odoo_product_id`` and records
    the outcome in an OdooSyncLog row.

    Fix vs. original: on failure the session is rolled back *before* the
    sync log is marked failed, so the failure status can still be committed
    even when the exception left the session in an aborted transaction.

    Args:
        limit: Maximum number of Odoo products to fetch.

    Returns:
        Dict with sync statistics, or an error payload.
    """
    db = self._get_db()
    # Persist the log row up front so a crash still leaves a "started" trace
    log = OdooSyncLog(
        sync_type="products",
        direction="import",
        status="started"
    )
    db.add(log)
    db.commit()

    try:
        if not self._connect():
            log.mark_failed("Cannot connect to Odoo")
            db.commit()
            return {"success": False, "error": "Connection failed"}

        # Sellable, active products only
        product_ids = self._execute(
            "product.template", "search",
            [["sale_ok", "=", True], ["active", "=", True]],
            limit=limit
        )

        # NOTE(review): image_1920 is a potentially large base64 payload that
        # is fetched but never stored locally — consider dropping it from
        # `fields` to shrink the XML-RPC response; kept for behavior parity.
        products_data = self._execute(
            "product.template", "read",
            product_ids,
            fields=["id", "name", "description_sale", "list_price", "categ_id",
                    "image_1920", "qty_available", "default_code"]
        )

        created = 0
        updated = 0
        failed = 0

        for odoo_product in products_data:
            try:
                local_product = db.query(Product).filter(
                    Product.odoo_product_id == odoo_product["id"]
                ).first()

                # categ_id comes back as [id, display_name] or False
                product_data = {
                    "name": odoo_product["name"],
                    "description": odoo_product.get("description_sale") or "",
                    "price": odoo_product.get("list_price", 0),
                    "category": odoo_product.get("categ_id", [0, "general"])[1] if odoo_product.get("categ_id") else "general",
                    "stock": int(odoo_product.get("qty_available", 0)),
                    "is_available": odoo_product.get("qty_available", 0) > 0,
                    "odoo_product_id": odoo_product["id"],
                    "odoo_last_synced": datetime.utcnow()
                }

                if local_product:
                    # Update the existing local row in place
                    for key, value in product_data.items():
                        setattr(local_product, key, value)
                    updated += 1
                else:
                    db.add(Product(**product_data))
                    created += 1

            except Exception as e:
                # One bad product must not abort the whole sync
                logger.error(f"Error syncing product {odoo_product.get('id')}: {e}")
                failed += 1

        db.commit()

        log.records_processed = len(products_data)
        log.records_created = created
        log.records_updated = updated
        log.records_failed = failed
        log.mark_completed()
        db.commit()

        return {
            "success": True,
            "processed": len(products_data),
            "created": created,
            "updated": updated,
            "failed": failed
        }

    except Exception as e:
        logger.error(f"Error in sync_products: {e}")
        # Fix: clear the aborted transaction before writing the failure status
        db.rollback()
        log.mark_failed(str(e))
        db.commit()
        return {"success": False, "error": str(e)}

    finally:
        db.close()
|
||||
|
||||
async def sync_services(self, limit: int = 100) -> Dict:
    """
    Sync services from Odoo to local database.

    Services in Odoo are typically products with type='service'.
    Every run is recorded in an OdooSyncLog row (created up-front with
    status="started", then marked completed/failed).

    Args:
        limit: Maximum number of Odoo service records to fetch.

    Returns:
        Dict with sync statistics ({"success", "processed", "created",
        "updated", "failed"}) or {"success": False, "error": ...}.
    """
    db = self._get_db()
    # Create the audit-log row first so even a connection failure is recorded.
    log = OdooSyncLog(
        sync_type="services",
        direction="import",
        status="started"
    )
    db.add(log)
    db.commit()

    try:
        if not self._connect():
            log.mark_failed("Cannot connect to Odoo")
            db.commit()
            return {"success": False, "error": "Connection failed"}

        # Fetch service-type products from Odoo (sellable and active only).
        service_ids = self._execute(
            "product.template", "search",
            [["type", "=", "service"], ["sale_ok", "=", True], ["active", "=", True]],
            limit=limit
        )

        services_data = self._execute(
            "product.template", "read",
            service_ids,
            fields=["id", "name", "description_sale", "list_price", "categ_id"]
        )

        created = 0
        updated = 0
        failed = 0

        for odoo_service in services_data:
            try:
                # Check if service exists locally (matched by Odoo id).
                local_service = db.query(Service).filter(
                    Service.odoo_service_id == odoo_service["id"]
                ).first()

                # categ_id comes back from Odoo as [id, display_name];
                # index [1] extracts the human-readable category name.
                service_data = {
                    "name": odoo_service["name"],
                    "description": odoo_service.get("description_sale") or "",
                    "category": odoo_service.get("categ_id", [0, "general"])[1] if odoo_service.get("categ_id") else "general",
                    "price_range": f"${odoo_service.get('list_price', 0):,.0f} MXN" if odoo_service.get("list_price") else None,
                    "odoo_service_id": odoo_service["id"],
                    "odoo_last_synced": datetime.utcnow()
                }

                if local_service:
                    # Update existing record field-by-field.
                    for key, value in service_data.items():
                        setattr(local_service, key, value)
                    updated += 1
                else:
                    # Create new local record.
                    local_service = Service(**service_data)
                    db.add(local_service)
                    created += 1

            except Exception as e:
                # One bad record must not abort the whole sync; count it and move on.
                logger.error(f"Error syncing service {odoo_service.get('id')}: {e}")
                failed += 1

        db.commit()

        # Record final statistics on the audit-log row.
        log.records_processed = len(services_data)
        log.records_created = created
        log.records_updated = updated
        log.records_failed = failed
        log.mark_completed()
        db.commit()

        return {
            "success": True,
            "processed": len(services_data),
            "created": created,
            "updated": updated,
            "failed": failed
        }

    except Exception as e:
        logger.error(f"Error in sync_services: {e}")
        log.mark_failed(str(e))
        db.commit()
        return {"success": False, "error": str(e)}

    finally:
        db.close()
|
||||
|
||||
async def create_lead(self, lead: Lead) -> Dict:
    """
    Create a lead in Odoo CRM.

    Maps the local Lead fields onto Odoo's crm.lead model and creates the
    record over XML-RPC. Does NOT mark the local lead as synced — callers
    (see export_leads_to_odoo) are responsible for that bookkeeping.

    Args:
        lead: Local lead object

    Returns:
        Dict with Odoo lead ID and status:
        {"success": True, "odoo_lead_id": int} or
        {"success": False, "error": str}.
    """
    try:
        if not self._connect():
            return {"success": False, "error": "Cannot connect to Odoo"}

        # Prepare lead data for Odoo. Field names follow crm.lead's schema
        # (e.g. "email_from" is Odoo's field for the lead's email).
        odoo_lead_data = {
            "name": lead.interest or f"Lead from {lead.platform}",
            "contact_name": lead.name or lead.username,
            "email_from": lead.email,
            "phone": lead.phone,
            "partner_name": lead.company,
            # Free-form description carries the social-media provenance.
            "description": f"""
Source: {lead.platform}
Username: @{lead.username}
Profile: {lead.profile_url}

Original Content:
{lead.source_content}

Notes:
{lead.notes or 'No notes'}
""".strip(),
            "type": "lead",
            # Odoo priority is a string enum: "2"=high, "1"=medium, "0"=low.
            "priority": "2" if lead.priority == "high" else "1" if lead.priority == "medium" else "0",
        }

        # Create lead in Odoo; "create" returns the new record's integer id.
        odoo_lead_id = self._execute(
            "crm.lead", "create",
            [odoo_lead_data]
        )

        return {
            "success": True,
            "odoo_lead_id": odoo_lead_id
        }

    except Exception as e:
        logger.error(f"Error creating Odoo lead: {e}")
        return {"success": False, "error": str(e)}
|
||||
|
||||
async def export_leads_to_odoo(self) -> Dict:
    """
    Export unsynced leads to Odoo CRM.

    Pushes every local Lead with synced_to_odoo == False via create_lead(),
    marking each successful one as synced. The run is tracked in an
    OdooSyncLog row; per-lead failures are collected rather than aborting
    the batch.

    Returns:
        Dict with export statistics ({"success", "processed", "created",
        "failed", "errors"}) or {"success": False, "error": ...}.
    """
    db = self._get_db()
    # Audit-log row is created first so a connection failure is still recorded.
    log = OdooSyncLog(
        sync_type="leads",
        direction="export",
        status="started"
    )
    db.add(log)
    db.commit()

    try:
        if not self._connect():
            log.mark_failed("Cannot connect to Odoo")
            db.commit()
            return {"success": False, "error": "Connection failed"}

        # Get unsynced leads
        unsynced_leads = db.query(Lead).filter(
            Lead.synced_to_odoo == False
        ).all()

        created = 0
        failed = 0
        errors = []

        for lead in unsynced_leads:
            try:
                result = await self.create_lead(lead)

                if result["success"]:
                    # Record the Odoo-side id and mark local lead as synced.
                    lead.odoo_lead_id = result["odoo_lead_id"]
                    lead.synced_to_odoo = True
                    lead.odoo_synced_at = datetime.utcnow()
                    created += 1
                else:
                    failed += 1
                    errors.append({
                        "lead_id": lead.id,
                        "error": result.get("error")
                    })

            except Exception as e:
                # One failing lead must not abort the export; record and continue.
                logger.error(f"Error exporting lead {lead.id}: {e}")
                failed += 1
                errors.append({
                    "lead_id": lead.id,
                    "error": str(e)
                })

        # Persist the per-lead sync flags before updating the log row.
        db.commit()

        log.records_processed = len(unsynced_leads)
        log.records_created = created
        log.records_failed = failed
        if errors:
            log.error_details = errors
        log.mark_completed()
        db.commit()

        return {
            "success": True,
            "processed": len(unsynced_leads),
            "created": created,
            "failed": failed,
            "errors": errors if errors else None
        }

    except Exception as e:
        logger.error(f"Error in export_leads_to_odoo: {e}")
        log.mark_failed(str(e))
        db.commit()
        return {"success": False, "error": str(e)}

    finally:
        db.close()
|
||||
|
||||
async def get_sales_summary(self, days: int = 30) -> Dict:
    """
    Get sales summary from Odoo.

    Reads confirmed sale.order records (state "sale" or "done") from the
    last *days* days and aggregates revenue. Read-only: touches no local
    database state.

    Args:
        days: Number of days to look back

    Returns:
        Dict with sales statistics ({"success", "period_days",
        "total_orders", "total_revenue", "avg_order_value", "orders"})
        or {"success": False, "error": ...}.
    """
    try:
        if not self._connect():
            return {"success": False, "error": "Cannot connect to Odoo"}

        # Local import — presumably timedelta is not imported at module
        # level in this file; verify against the file header.
        from datetime import timedelta
        start_date = (datetime.utcnow() - timedelta(days=days)).strftime("%Y-%m-%d")

        # Get confirmed sales orders within the window.
        order_ids = self._execute(
            "sale.order", "search",
            [["state", "in", ["sale", "done"]], ["date_order", ">=", start_date]]
        )

        orders_data = self._execute(
            "sale.order", "read",
            order_ids,
            fields=["id", "name", "amount_total", "date_order", "partner_id", "state"]
        )

        total_revenue = sum(order.get("amount_total", 0) for order in orders_data)
        total_orders = len(orders_data)

        return {
            "success": True,
            "period_days": days,
            "total_orders": total_orders,
            "total_revenue": total_revenue,
            # Guard against division by zero when there are no orders.
            "avg_order_value": total_revenue / total_orders if total_orders > 0 else 0,
            "orders": orders_data[:10]  # Return latest 10
        }

    except Exception as e:
        logger.error(f"Error getting sales summary: {e}")
        return {"success": False, "error": str(e)}
|
||||
|
||||
async def get_sync_logs(self, limit: int = 20) -> List[Dict]:
    """Return the most recent Odoo sync-log entries, newest first.

    Args:
        limit: Maximum number of log rows to return.

    Returns:
        List of serialized log dicts (via OdooSyncLog.to_dict()).
    """
    session = self._get_db()
    try:
        recent_logs = (
            session.query(OdooSyncLog)
            .order_by(OdooSyncLog.started_at.desc())
            .limit(limit)
            .all()
        )
        serialized = []
        for entry in recent_logs:
            serialized.append(entry.to_dict())
        return serialized
    finally:
        session.close()
|
||||
|
||||
|
||||
# Global instance — module-level singleton shared by API routes and Celery tasks.
odoo_service = OdooService()
|
||||
327
app/services/recycling_service.py
Normal file
327
app/services/recycling_service.py
Normal file
@@ -0,0 +1,327 @@
|
||||
"""
|
||||
Content Recycling Service - Republish high-performing evergreen content.
|
||||
"""
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Dict, Optional
|
||||
import logging
|
||||
|
||||
from app.core.database import SessionLocal
|
||||
from app.models.post import Post
|
||||
from app.models.recycled_post import RecycledPost
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RecyclingService:
    """Service for recycling high-performing content.

    Finds past posts with strong engagement, scores them, and re-schedules
    copies (optionally modified) while tracking each recycle in a
    RecycledPost record. All persistence goes through short-lived
    SQLAlchemy sessions opened per call.
    """

    # Default settings
    MIN_DAYS_SINCE_PUBLISH = 30  # Don't recycle recent posts
    MIN_ENGAGEMENT_RATE = 2.0  # Minimum engagement rate (%) to consider
    MAX_RECYCLE_COUNT = 3  # Maximum times to recycle a post

    def _get_db(self):
        """Get a fresh database session (caller must close it)."""
        return SessionLocal()

    def _calculate_engagement_rate(self, metrics: dict) -> float:
        """Calculate engagement rate (%) from a post's metrics dict.

        Rate = (likes + comments + shares + retweets) / impressions * 100.
        Returns 0.0 when metrics are missing or impressions is zero.
        """
        if not metrics:
            return 0.0

        impressions = metrics.get("impressions", 0)
        if impressions == 0:
            # Avoid division by zero for posts with no recorded reach.
            return 0.0

        engagements = (
            metrics.get("likes", 0) +
            metrics.get("comments", 0) +
            metrics.get("shares", 0) +
            metrics.get("retweets", 0)
        )

        return (engagements / impressions) * 100

    async def find_recyclable_posts(
        self,
        platform: str = None,
        content_type: str = None,
        min_engagement_rate: float = None,
        min_days_since_publish: int = None,
        limit: int = 20
    ) -> List[Dict]:
        """
        Find posts that are good candidates for recycling.

        Args:
            platform: Filter by platform
            content_type: Filter by content type
            min_engagement_rate: Minimum engagement rate threshold
            min_days_since_publish: Minimum days since original publish
            limit: Maximum candidates to return

        Returns:
            List of recyclable post candidates with scores, sorted by
            score descending.
        """
        db = self._get_db()

        try:
            # Fall back to class-level defaults when thresholds not given.
            min_eng = min_engagement_rate or self.MIN_ENGAGEMENT_RATE
            min_days = min_days_since_publish or self.MIN_DAYS_SINCE_PUBLISH

            cutoff_date = datetime.utcnow() - timedelta(days=min_days)

            # Query for published posts old enough, still recyclable, and
            # with metrics recorded.
            query = db.query(Post).filter(
                Post.status == "published",
                Post.published_at <= cutoff_date,
                Post.is_recyclable == True,
                Post.recycle_count < self.MAX_RECYCLE_COUNT,
                Post.metrics.isnot(None)
            )

            if platform:
                query = query.filter(Post.platforms.contains([platform]))

            if content_type:
                query = query.filter(Post.content_type == content_type)

            posts = query.all()

            # Calculate engagement in Python (metrics is a JSON blob) and
            # filter/sort on it.
            candidates = []
            for post in posts:
                engagement_rate = self._calculate_engagement_rate(post.metrics)

                if engagement_rate >= min_eng:
                    # Calculate a recycling score: engagement weighted by
                    # age (older is better, capped) and a penalty per
                    # previous recycle.
                    days_since_publish = (datetime.utcnow() - post.published_at).days
                    recency_factor = min(days_since_publish / 90, 1.0)  # Max out at 90 days
                    recycled_penalty = 1 - (post.recycle_count * 0.2)  # 20% penalty per recycle

                    score = engagement_rate * recency_factor * recycled_penalty

                    candidates.append({
                        "id": post.id,
                        "content": post.content[:100] + "..." if len(post.content) > 100 else post.content,
                        "full_content": post.content,
                        "content_type": post.content_type,
                        "platforms": post.platforms,
                        "published_at": post.published_at.isoformat(),
                        "days_since_publish": days_since_publish,
                        "engagement_rate": round(engagement_rate, 2),
                        "recycle_count": post.recycle_count,
                        "score": round(score, 2),
                        "metrics": post.metrics
                    })

            # Sort by score, best first.
            candidates.sort(key=lambda x: x["score"], reverse=True)

            return candidates[:limit]

        finally:
            db.close()

    async def recycle_post(
        self,
        post_id: int,
        modifications: Dict = None,
        scheduled_for: datetime = None,
        platforms: List[str] = None,
        reason: str = "manual"
    ) -> Dict:
        """
        Create a recycled version of a post.

        Args:
            post_id: Original post ID
            modifications: Dict of modifications {content, hashtags, image_url}
            scheduled_for: When to publish (defaults to now + 1 hour)
            platforms: Override platforms (defaults to original)
            reason: Reason for recycling (high_performer, evergreen, seasonal, manual)

        Returns:
            Dict with new post info, or {"success": False, "error": ...}
            if the original is missing, non-recyclable, or over the
            recycle limit.
        """
        db = self._get_db()

        try:
            original = db.query(Post).filter(Post.id == post_id).first()
            if not original:
                return {"success": False, "error": "Original post not found"}

            if not original.is_recyclable:
                return {"success": False, "error": "Post is marked as not recyclable"}

            if original.recycle_count >= self.MAX_RECYCLE_COUNT:
                return {"success": False, "error": f"Post has been recycled {original.recycle_count} times (max {self.MAX_RECYCLE_COUNT})"}

            # Create new post — each field falls back to the original's
            # value when no modification was supplied.
            new_content = modifications.get("content") if modifications else None
            new_hashtags = modifications.get("hashtags") if modifications else None
            new_image = modifications.get("image_url") if modifications else None

            new_post = Post(
                content=new_content or original.content,
                content_type=original.content_type,
                platforms=platforms or original.platforms,
                status="scheduled",
                scheduled_at=scheduled_for or (datetime.utcnow() + timedelta(hours=1)),
                hashtags=new_hashtags or original.hashtags,
                image_url=new_image or original.image_url,
                recycled_from_id=original.id,
                is_recyclable=True
            )

            db.add(new_post)
            # flush() assigns new_post.id without committing, so the
            # RecycledPost record below can reference it.
            db.flush()

            # Track the recycling in its own audit record.
            recycle_record = RecycledPost(
                original_post_id=original.id,
                new_post_id=new_post.id,
                recycle_number=original.recycle_count + 1,
                modifications={
                    "content_changed": bool(new_content),
                    "hashtags_updated": bool(new_hashtags),
                    "image_changed": bool(new_image)
                },
                original_engagement_rate=self._calculate_engagement_rate(original.metrics),
                reason=reason,
                status="pending",
                scheduled_for=new_post.scheduled_at
            )

            db.add(recycle_record)

            # Update original's recycle count
            original.recycle_count += 1

            db.commit()
            db.refresh(new_post)

            return {
                "success": True,
                "new_post_id": new_post.id,
                "recycle_record_id": recycle_record.id,
                "scheduled_for": new_post.scheduled_at.isoformat(),
                "platforms": new_post.platforms
            }

        except Exception as e:
            logger.error(f"Error recycling post: {e}")
            db.rollback()
            return {"success": False, "error": str(e)}

        finally:
            db.close()

    async def auto_recycle(
        self,
        platform: str = None,
        count: int = 1,
        min_engagement_rate: float = None
    ) -> Dict:
        """
        Automatically select and recycle top-performing posts.

        Args:
            platform: Filter by platform
            count: Number of posts to recycle
            min_engagement_rate: Minimum engagement rate threshold

        Returns:
            Dict with recycled posts info.
        """
        db = self._get_db()

        try:
            # Find candidates — fetch extra in case some fail to recycle.
            candidates = await self.find_recyclable_posts(
                platform=platform,
                min_engagement_rate=min_engagement_rate or self.MIN_ENGAGEMENT_RATE,
                limit=count * 2  # Get extra in case some fail
            )

            if not candidates:
                return {
                    "success": True,
                    "recycled": 0,
                    "message": "No eligible posts found for recycling"
                }

            recycled = []
            for candidate in candidates[:count]:
                result = await self.recycle_post(
                    post_id=candidate["id"],
                    reason="high_performer"
                )

                if result.get("success"):
                    recycled.append({
                        "original_id": candidate["id"],
                        "new_post_id": result["new_post_id"],
                        "engagement_rate": candidate["engagement_rate"]
                    })

            return {
                "success": True,
                "recycled": len(recycled),
                "posts": recycled
            }

        finally:
            db.close()

    async def get_recycling_history(
        self,
        original_post_id: int = None,
        limit: int = 50
    ) -> List[Dict]:
        """
        Get recycling history.

        Args:
            original_post_id: Filter by original post
            limit: Maximum records to return

        Returns:
            List of recycling records, newest first.
        """
        db = self._get_db()

        try:
            query = db.query(RecycledPost)

            if original_post_id:
                query = query.filter(RecycledPost.original_post_id == original_post_id)

            records = query.order_by(RecycledPost.recycled_at.desc()).limit(limit).all()

            return [r.to_dict() for r in records]

        finally:
            db.close()

    async def mark_post_not_recyclable(self, post_id: int) -> Dict:
        """Mark a post as not eligible for recycling."""
        db = self._get_db()

        try:
            post = db.query(Post).filter(Post.id == post_id).first()
            if not post:
                return {"success": False, "error": "Post not found"}

            post.is_recyclable = False
            db.commit()

            return {"success": True, "message": "Post marked as not recyclable"}

        finally:
            db.close()
|
||||
|
||||
|
||||
# Global instance — module-level singleton shared by API routes and Celery tasks.
recycling_service = RecyclingService()
|
||||
393
app/services/thread_service.py
Normal file
393
app/services/thread_service.py
Normal file
@@ -0,0 +1,393 @@
|
||||
"""
|
||||
Thread Series Service - Create and manage multi-post threads.
|
||||
"""
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Dict, Optional
|
||||
import logging
|
||||
|
||||
from app.core.database import SessionLocal
|
||||
from app.core.config import settings
|
||||
from app.models.thread_series import ThreadSeries, ThreadPost
|
||||
from app.models.post import Post
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ThreadService:
    """Service for managing thread series.

    A ThreadSeries groups ordered ThreadPost rows; scheduling a series
    materializes each ThreadPost into a regular Post spaced by the
    series' interval. All persistence uses short-lived SQLAlchemy
    sessions opened per call.
    """

    def _get_db(self):
        """Get a fresh database session (caller must close it)."""
        return SessionLocal()

    async def create_series(
        self,
        name: str,
        platform: str,
        posts_content: List[Dict],
        description: str = None,
        topic: str = None,
        schedule_type: str = "sequential",
        interval_minutes: int = 5,
        start_time: datetime = None,
        hashtags: List[str] = None,
        ai_generated: bool = False,
        generation_prompt: str = None
    ) -> ThreadSeries:
        """
        Create a new thread series with posts.

        Args:
            name: Series name
            platform: Target platform (x, threads)
            posts_content: List of post content dicts [{"content": "...", "image_url": "..."}]
            description: Series description
            topic: Topic for categorization
            schedule_type: "sequential" or "timed"
            interval_minutes: Minutes between posts
            start_time: When to start publishing
            hashtags: Common hashtags for the series
            ai_generated: Whether content was AI-generated
            generation_prompt: The prompt used for AI generation

        Returns:
            Created ThreadSeries object (status "draft").

        Raises:
            Exception: Re-raises any DB error after rolling back.
        """
        db = self._get_db()

        try:
            # Create series
            series = ThreadSeries(
                name=name,
                description=description,
                topic=topic,
                platform=platform,
                schedule_type=schedule_type,
                interval_minutes=interval_minutes,
                start_time=start_time,
                total_posts=len(posts_content),
                status="draft",
                hashtags=hashtags,
                ai_generated=ai_generated,
                generation_prompt=generation_prompt
            )

            db.add(series)
            # flush() assigns series.id so the child posts can reference it.
            db.flush()

            # Create thread posts, numbered from 1 in the given order.
            for i, content_data in enumerate(posts_content, 1):
                thread_post = ThreadPost(
                    series_id=series.id,
                    sequence_number=i,
                    content=content_data.get("content", ""),
                    image_url=content_data.get("image_url"),
                    status="pending"
                )
                db.add(thread_post)

            db.commit()
            db.refresh(series)

            return series

        except Exception as e:
            logger.error(f"Error creating thread series: {e}")
            db.rollback()
            raise

        finally:
            db.close()

    async def generate_thread_with_ai(
        self,
        topic: str,
        platform: str,
        num_posts: int = 5,
        style: str = "educational"
    ) -> Dict:
        """
        Generate a thread series using AI.

        Args:
            topic: The topic to generate content about
            platform: Target platform
            num_posts: Number of posts in the thread
            style: Style of content (educational, storytelling, tips)

        Returns:
            Dict with generated content ({"success", "topic", "platform",
            "style", "posts", "count"}) or {"success": False, "error": ...}.
        """
        if not settings.DEEPSEEK_API_KEY:
            return {"success": False, "error": "AI API not configured"}

        try:
            from app.services.content_generator import content_generator

            # Generate thread content. The prompt is intentionally in
            # Spanish — the product's audience/content language.
            prompt = f"""Genera un hilo de {num_posts} publicaciones sobre: {topic}

Estilo: {style}
Plataforma: {platform}

Requisitos:
1. Cada publicación debe ser concisa (max 280 caracteres para X, 500 para Threads)
2. La primera publicación debe captar atención
3. Cada publicación debe añadir valor y conectar con la siguiente
4. La última debe tener un llamado a la acción
5. Usa emojis apropiadamente
6. Mantén el tono profesional pero accesible

Formato de respuesta (JSON):
[
  {{"sequence": 1, "content": "..."}},
  {{"sequence": 2, "content": "..."}},
  ...
]
"""

            # Use the content generator to get AI response
            response = await content_generator._call_deepseek(
                prompt,
                temperature=0.7
            )

            # Parse the response
            import json
            import re

            # Try to extract the JSON array from the (possibly chatty)
            # model response — greedy match from first '[' to last ']'.
            json_match = re.search(r'\[[\s\S]*\]', response)
            if json_match:
                posts_data = json.loads(json_match.group())

                return {
                    "success": True,
                    "topic": topic,
                    "platform": platform,
                    "style": style,
                    "posts": posts_data,
                    "count": len(posts_data)
                }
            else:
                return {"success": False, "error": "Could not parse AI response"}

        except Exception as e:
            logger.error(f"Error generating thread with AI: {e}")
            return {"success": False, "error": str(e)}

    async def schedule_series(
        self,
        series_id: int,
        start_time: datetime = None
    ) -> Dict:
        """
        Schedule a thread series for publishing.

        Creates one scheduled Post per ThreadPost, spaced
        interval_minutes apart starting at start_time.

        Args:
            series_id: ID of the series
            start_time: When to start (defaults to now + 5 minutes)

        Returns:
            Dict with scheduling info, or {"success": False, "error": ...}
            if the series is missing, has no posts, or is not in a
            schedulable state.
        """
        db = self._get_db()

        try:
            series = db.query(ThreadSeries).filter(ThreadSeries.id == series_id).first()
            if not series:
                return {"success": False, "error": "Series not found"}

            # Only draft or paused series may be (re)scheduled.
            if series.status not in ["draft", "paused"]:
                return {"success": False, "error": f"Series is {series.status}, cannot schedule"}

            thread_posts = db.query(ThreadPost).filter(
                ThreadPost.series_id == series_id
            ).order_by(ThreadPost.sequence_number).all()

            if not thread_posts:
                return {"success": False, "error": "Series has no posts"}

            # Set start time
            start = start_time or (datetime.utcnow() + timedelta(minutes=5))
            series.start_time = start

            # Schedule each post, offset by its position in the series.
            for i, thread_post in enumerate(thread_posts):
                scheduled_at = start + timedelta(minutes=i * series.interval_minutes)

                # Create the actual post
                post = Post(
                    content=thread_post.content,
                    content_type="thread",
                    platforms=[series.platform],
                    status="scheduled",
                    scheduled_at=scheduled_at,
                    hashtags=series.hashtags,
                    image_url=thread_post.image_url
                )

                db.add(post)
                # flush() assigns post.id so the ThreadPost can link to it.
                db.flush()

                thread_post.post_id = post.id
                thread_post.scheduled_at = scheduled_at
                thread_post.status = "scheduled"

            series.status = "scheduled"
            db.commit()

            return {
                "success": True,
                "series_id": series_id,
                "start_time": start.isoformat(),
                "posts_scheduled": len(thread_posts)
            }

        except Exception as e:
            logger.error(f"Error scheduling series: {e}")
            db.rollback()
            return {"success": False, "error": str(e)}

        finally:
            db.close()

    async def publish_next_post(self, series_id: int) -> Dict:
        """
        Publish the next pending post in a series.

        NOTE: this method only advances series bookkeeping — actual
        publishing is delegated to the scheduler (see inline comment).

        Args:
            series_id: ID of the series

        Returns:
            Dict with publish result; when no posts remain, marks the
            series completed and returns {"series_completed": True}.
        """
        db = self._get_db()

        try:
            series = db.query(ThreadSeries).filter(ThreadSeries.id == series_id).first()
            if not series:
                return {"success": False, "error": "Series not found"}

            # Find next unpublished post (lowest sequence number still
            # pending or scheduled).
            next_post = db.query(ThreadPost).filter(
                ThreadPost.series_id == series_id,
                ThreadPost.status.in_(["pending", "scheduled"])
            ).order_by(ThreadPost.sequence_number).first()

            if not next_post:
                # All posts published
                series.status = "completed"
                series.completed_at = datetime.utcnow()
                db.commit()
                return {
                    "success": True,
                    "message": "All posts in series have been published",
                    "series_completed": True
                }

            # Publish the post (this would normally trigger the publish task)
            # For now, just mark it and let the scheduler handle it

            if next_post.sequence_number == 1:
                # First post of the thread kicks the series into "publishing".
                series.status = "publishing"

            # Update series progress
            series.posts_published = db.query(ThreadPost).filter(
                ThreadPost.series_id == series_id,
                ThreadPost.status == "published"
            ).count()

            db.commit()

            return {
                "success": True,
                "post_id": next_post.post_id,
                "sequence_number": next_post.sequence_number,
                "total_posts": series.total_posts
            }

        except Exception as e:
            logger.error(f"Error publishing next post: {e}")
            db.rollback()
            return {"success": False, "error": str(e)}

        finally:
            db.close()

    async def get_series(self, series_id: int) -> Optional[Dict]:
        """Get a series with its posts, or None if it does not exist."""
        db = self._get_db()

        try:
            series = db.query(ThreadSeries).filter(ThreadSeries.id == series_id).first()
            if series:
                return series.to_dict(include_posts=True)
            return None

        finally:
            db.close()

    async def get_series_list(
        self,
        status: str = None,
        platform: str = None,
        limit: int = 20
    ) -> List[Dict]:
        """Get list of thread series (without posts), newest first.

        Args:
            status: Optional status filter.
            platform: Optional platform filter.
            limit: Maximum number of series to return.
        """
        db = self._get_db()

        try:
            query = db.query(ThreadSeries)

            if status:
                query = query.filter(ThreadSeries.status == status)
            if platform:
                query = query.filter(ThreadSeries.platform == platform)

            series_list = query.order_by(ThreadSeries.created_at.desc()).limit(limit).all()
            return [s.to_dict(include_posts=False) for s in series_list]

        finally:
            db.close()

    async def cancel_series(self, series_id: int) -> Dict:
        """Cancel a series and its scheduled posts.

        Marks the series cancelled, then cancels each scheduled
        ThreadPost and its linked Post (only if that Post is still in
        "scheduled" state — already-published posts are untouched).
        """
        db = self._get_db()

        try:
            series = db.query(ThreadSeries).filter(ThreadSeries.id == series_id).first()
            if not series:
                return {"success": False, "error": "Series not found"}

            series.status = "cancelled"

            # Cancel scheduled posts
            thread_posts = db.query(ThreadPost).filter(
                ThreadPost.series_id == series_id,
                ThreadPost.status == "scheduled"
            ).all()

            for tp in thread_posts:
                tp.status = "cancelled"
                if tp.post_id:
                    post = db.query(Post).filter(Post.id == tp.post_id).first()
                    if post and post.status == "scheduled":
                        post.status = "cancelled"

            db.commit()

            return {"success": True, "message": "Series cancelled"}

        except Exception as e:
            logger.error(f"Error cancelling series: {e}")
            db.rollback()
            return {"success": False, "error": str(e)}

        finally:
            db.close()
||||
|
||||
|
||||
# Global instance — module-level singleton shared by API routes and Celery tasks.
thread_service = ThreadService()
|
||||
Reference in New Issue
Block a user