From 991bd161f0aaf10c7e921643ac445de706ad29d1 Mon Sep 17 00:00:00 2001 From: Claude AI Date: Thu, 29 Jan 2026 11:13:15 +0000 Subject: [PATCH] feat(fase4): add AI response and sentiment nodes Co-Authored-By: Claude Opus 4.5 --- services/flow-engine/app/config.py | 5 + services/flow-engine/app/nodes/__init__.py | 18 +++- services/flow-engine/app/nodes/ai.py | 117 +++++++++++++++++++++ 3 files changed, 137 insertions(+), 3 deletions(-) create mode 100644 services/flow-engine/app/nodes/ai.py diff --git a/services/flow-engine/app/config.py b/services/flow-engine/app/config.py index deb0f28..bd5a365 100644 --- a/services/flow-engine/app/config.py +++ b/services/flow-engine/app/config.py @@ -5,6 +5,11 @@ from functools import lru_cache class Settings(BaseSettings): DATABASE_URL: str = "postgresql://whatsapp_admin:password@localhost:5432/whatsapp_central" REDIS_URL: str = "redis://localhost:6379" + + # OpenAI + OPENAI_API_KEY: str = "" + OPENAI_MODEL: str = "gpt-3.5-turbo" + API_GATEWAY_URL: str = "http://localhost:8000" WHATSAPP_CORE_URL: str = "http://localhost:3001" diff --git a/services/flow-engine/app/nodes/__init__.py b/services/flow-engine/app/nodes/__init__.py index 43e097f..8d0a748 100644 --- a/services/flow-engine/app/nodes/__init__.py +++ b/services/flow-engine/app/nodes/__init__.py @@ -1,6 +1,18 @@ +from app.nodes.advanced import ( + DelayExecutor, + GoToExecutor, + LoopExecutor, + RandomExecutor, + SwitchExecutor, +) +from app.nodes.ai import AIResponseExecutor, AISentimentExecutor from app.nodes.base import NodeExecutor, NodeRegistry from app.nodes.basic import ( - TriggerExecutor, MessageExecutor, ButtonsExecutor, - WaitInputExecutor, SetVariableExecutor, ConditionExecutor + ButtonsExecutor, + ConditionExecutor, + MessageExecutor, + SetVariableExecutor, + TriggerExecutor, + WaitInputExecutor, ) -from app.nodes.script import JavaScriptExecutor, HttpRequestExecutor +from app.nodes.script import HttpRequestExecutor, JavaScriptExecutor diff --git 
from typing import Any, Optional, Tuple

import httpx

from app.config import get_settings
from app.context import FlowContext
from app.nodes.base import NodeExecutor

settings = get_settings()

# Single endpoint shared by both executors below.
OPENAI_CHAT_URL = "https://api.openai.com/v1/chat/completions"


async def _chat_completion(
    payload: dict, timeout: float
) -> Tuple[Optional[str], Optional[str]]:
    """POST *payload* to the OpenAI chat completions endpoint.

    Returns a ``(content, error)`` pair: ``(text, None)`` on success, or
    ``(None, description)`` on any network, HTTP-status, or response-shape
    failure.  Never raises, so each caller can map failures onto its own
    node outcome ("error" / "neutral") instead of crashing the flow.
    """
    try:
        async with httpx.AsyncClient() as client:
            response = await client.post(
                OPENAI_CHAT_URL,
                headers={
                    "Authorization": f"Bearer {settings.OPENAI_API_KEY}",
                    "Content-Type": "application/json",
                },
                json=payload,
                timeout=timeout,
            )
    except httpx.HTTPError as exc:
        # httpx.HTTPError covers transport errors, timeouts and protocol
        # errors — previously these propagated out of execute() uncaught.
        return None, f"OpenAI request failed: {exc}"

    if response.status_code != 200:
        return None, response.text

    try:
        return response.json()["choices"][0]["message"]["content"], None
    except (KeyError, IndexError, TypeError, ValueError) as exc:
        # Guard against a malformed/unexpected response body.
        return None, f"Unexpected OpenAI response: {exc}"


class AIResponseExecutor(NodeExecutor):
    """Generate AI response using OpenAI.

    Config keys:
        prompt: user prompt template, interpolated against the flow context
            (required; an empty prompt is an error).
        system_prompt: system message (defaults to a Spanish assistant prompt).
        output_variable: context variable receiving the reply
            (default "_ai_response").
        max_tokens / temperature: generation parameters (defaults 500 / 0.7).
        model: optional per-node model override
            (default ``settings.OPENAI_MODEL``).
        include_history: when true, messages stored in the context variable
            "_conversation_history" are inserted between the system prompt
            and the new user turn.

    Returns "success" (reply stored in *output_variable*) or "error"
    (description stored in "_ai_error").
    """

    async def execute(
        self, config: dict, context: FlowContext, session: Any
    ) -> Optional[str]:
        prompt = context.interpolate(config.get("prompt", ""))
        system_prompt = config.get("system_prompt", "Eres un asistente Ăștil.")
        output_variable = config.get("output_variable", "_ai_response")

        if not settings.OPENAI_API_KEY:
            context.set("_ai_error", "OpenAI API key not configured")
            return "error"

        if not prompt:
            # Populate _ai_error here too, for parity with the other paths.
            context.set("_ai_error", "Prompt is empty")
            return "error"

        # Build the message list once; the optional history sits between the
        # system prompt and the new user turn.
        messages = [{"role": "system", "content": system_prompt}]
        if config.get("include_history", False):
            messages.extend(context.get("_conversation_history") or [])
        messages.append({"role": "user", "content": prompt})

        content, error = await _chat_completion(
            {
                "model": config.get("model", settings.OPENAI_MODEL),
                "messages": messages,
                "max_tokens": config.get("max_tokens", 500),
                "temperature": config.get("temperature", 0.7),
            },
            timeout=30,
        )
        if error is not None:
            context.set("_ai_error", error)
            return "error"

        context.set(output_variable, content)
        return "success"


class AISentimentExecutor(NodeExecutor):
    """Analyze sentiment of user message.

    Reads the text from the context variable named by ``config["variable"]``,
    falling back to the incoming message content.  Stores the raw model
    answer in *output_variable* (default "_sentiment") and returns one of
    "positive", "negative" or "neutral".  Any failure (missing API key,
    empty text, network/API error) degrades to "neutral" rather than
    aborting the flow.
    """

    async def execute(
        self, config: dict, context: FlowContext, session: Any
    ) -> Optional[str]:
        text = context.get(config.get("variable", "")) or context.message.get(
            "content", ""
        )
        output_variable = config.get("output_variable", "_sentiment")

        if not settings.OPENAI_API_KEY or not text:
            return "neutral"

        content, error = await _chat_completion(
            {
                # Classification is cheap; keep the inexpensive default model
                # unless the node config explicitly overrides it.
                "model": config.get("model", "gpt-3.5-turbo"),
                "messages": [
                    {
                        "role": "system",
                        "content": "Analyze the sentiment. Reply with only one word: positive, negative, or neutral",
                    },
                    {"role": "user", "content": text},
                ],
                "max_tokens": 10,
                "temperature": 0,
            },
            timeout=15,
        )
        if error is not None:
            return "neutral"

        sentiment = content.lower().strip()
        context.set(output_variable, sentiment)

        # "positive" is tested before "negative", matching the original
        # precedence for answers containing both words.
        if "positive" in sentiment:
            return "positive"
        if "negative" in sentiment:
            return "negative"
        return "neutral"