import gradio as gr
import os
import requests
from crewai import Agent, Task, Crew, Process, LLM
from crewai_tools import ScrapeWebsiteTool
from crewai.tools import BaseTool
from dotenv import load_dotenv

load_dotenv()


class ShortenerTool(BaseTool):
    """Shortens a URL via the Natura shortener API, falling back to the original URL on failure."""

    name: str = "URL Shortener Tool"
    description: str = "Generates a short version of a given URL using an external API."
    natura_api_token: str

    def _run(self, original_url: str) -> str:
        api_url = "https://sales-mgmt-cb-bff-apigw.prd.naturacloud.com/cb-bff-cms/cms/shortener"
        headers = {
            "authorization": f"Bearer {self.natura_api_token}",
            "content-type": "application/json",
        }
        payload = {"url": original_url}
        try:
            response = requests.post(api_url, headers=headers, json=payload)
            response.raise_for_status()
            short_url_data = response.json()
            return short_url_data.get("short", original_url)
        except requests.exceptions.RequestException as e:
            print(f"Warning: Error generating short URL: {e}. Returning original URL.")
            return original_url
        except ValueError:
            print("Warning: Invalid JSON response from shortener API. Returning original URL.")
            return original_url


class CalculateDiscountedPriceTool(BaseTool):
    """Applies a percentage discount to a price and returns the result rounded to two decimals."""

    name: str = "Calculate Discounted Price Tool"
    description: str = "Calculates the price after applying a given discount percentage."

    def _run(self, original_price: float, discount_percentage: float) -> float:
        if not isinstance(original_price, (int, float)) or not isinstance(discount_percentage, (int, float)):
            raise ValueError("Both original_price and discount_percentage must be numbers.")
        if discount_percentage < 0 or discount_percentage > 100:
            raise ValueError("Discount percentage must be between 0 and 100.")
        discount_amount = original_price * (discount_percentage / 100)
        discounted_price = original_price - discount_amount
        return round(discounted_price, 2)


class SocialMediaCrew:
    """Builds the CrewAI agents used to analyze a product page and write a social media post."""

    def __init__(self, openai_api_key: str, natura_api_token: str, openai_base_url: str, openai_model_name: str):
        self.openai_api_key = openai_api_key
        self.natura_api_token = natura_api_token
        self.openai_base_url = openai_base_url
        self.openai_model_name = openai_model_name

        # Tools shared with the agents.
        self.scrape_tool = ScrapeWebsiteTool()
        self.shortener_tool = ShortenerTool(natura_api_token=self.natura_api_token)
        self.calculate_discounted_price_tool = CalculateDiscountedPriceTool()

        print("Initializing SocialMediaCrew with BASE URL:", self.openai_base_url)
        print("Using OpenAI Model:", self.openai_model_name)
        print("Using OpenAI Key:", self.openai_api_key[:10])  # Only the key prefix is logged.

        llm = LLM(
            api_key=self.openai_api_key,
            model=self.openai_model_name,
            base_url=self.openai_base_url,
        )

        self.product_analyst = Agent(
            role='Product Analyst',
            goal='Analyze the provided URL and extract key product information',
            backstory=(
                "You are an expert in analyzing product pages and extracting the most important information. "
                "You can identify the product name, its main features, and the target audience."
            ),
            verbose=True,
            tools=[self.scrape_tool, self.shortener_tool, self.calculate_discounted_price_tool],
            allow_delegation=False,
            llm=llm,
        )

        self.social_media_copywriter = Agent(
            role='Social Media Copywriter',
            goal='Create a compelling social media post in Portuguese to sell the product',
            backstory=(
                "You are a creative copywriter specialized in the beauty and fragrance market. "
                "You know how to craft posts that are engaging, persuasive, and tailored for a "
                "Portuguese-speaking audience. You are an expert in using emojis and hashtags "
                "to increase engagement."
            ),
            verbose=True,
            allow_delegation=False,
            llm=llm,
        )

    def run_crew(self, product_url: str, main_cupom: str, main_cupom_discount_percentage: float,
                 cupom_1: str, cupom_2: str) -> str:
        # Browser-like headers so the product page request is not rejected.
        headers = {
            "accept": "*/*",
            "accept-language": "pt-BR,pt;q=0.9,en-US;q=0.8,en;q=0.7",
            "sec-ch-ua": '"Not)A;Brand";v="8", "Chromium";v="138", "Google Chrome";v="138"',
            "sec-ch-ua-mobile": "?0",
            "sec-ch-ua-platform": '"Windows"',
            "sec-fetch-dest": "empty",
            "sec-fetch-mode": "cors",
            "sec-fetch-site": "cross-site",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36",
        }
        try:
            response = requests.get(product_url, headers=headers)
            response.raise_for_status()
            if '