import gradio as gr
import os
import requests
from crewai import Agent, Task, Crew, Process, LLM
from dotenv import load_dotenv
from stealth_scrape_tool import StealthScrapeTool
from image_generator_tool import GenerateImageTool
from utils_tools import CalculateDiscountedPriceTool, CalculateDiscountValueTool, GetImageUrlTool, MerchantSelectorTool


load_dotenv()

class SocialMediaCrew:
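    """Two-agent CrewAI pipeline that turns a product URL into a WhatsApp-ready ad in Portuguese.

    A Product Analyst scrapes the page and works out the prices, then a Social Media
    Copywriter fills the merchant's post template with the extracted information.
    """
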
    def __init__(self, openai_api_key: str, natura_api_token: str, openai_base_url: str, openai_model_name: str):
        self.openai_api_key = openai_api_key
        self.natura_api_token = natura_api_token
        self.openai_base_url = openai_base_url
        self.openai_model_name = openai_model_name
        self.scrape_tool = StealthScrapeTool()  # stealth scraper used instead of the stock ScrapeWebsiteTool
        self.calculate_discounted_price_tool = CalculateDiscountedPriceTool()
        self.calculate_discount_value_tool = CalculateDiscountValueTool()
        self.image_generator_tool = GenerateImageTool()
        self.merchant_selector_tool = MerchantSelectorTool(natura_api_token=self.natura_api_token)

        print("Initializing SocialMediaCrew with BASE URL:", self.openai_base_url)
        print("Using OpenAI Model:", self.openai_model_name)
        print("Using OpenAI Key:", self.openai_api_key[:10])

        llm = LLM(
            api_key=self.openai_api_key, 
            model=self.openai_model_name, 
            base_url=self.openai_base_url
        )

        self.product_analyst = Agent(
            role='Product Analyst',
            goal='Analyze the provided URL and extract key product information',
            backstory=("You are an expert in analyzing product pages and extracting the most important information. You can identify the product name, its main features, and the target audience."),
            verbose=True,
            tools=[self.scrape_tool, 
                   self.calculate_discounted_price_tool, 
                   self.calculate_discount_value_tool],
            allow_delegation=False,
            llm=llm,
            max_retries=3
        )

        self.social_media_copywriter = Agent(
            role='Social Media Copywriter',
            goal='Create a compelling social media post in Portuguese to sell the product',
            backstory=("You are a creative copywriter specialized in the beauty and fragrance market. You know how to craft posts that are engaging, persuasive, and tailored for a Portuguese-speaking audience. You are an expert in using emojis and hashtags to increase engagement."),
            verbose=True,
            tools=[self.image_generator_tool],
            allow_delegation=False,
            llm=llm,
            max_retries=3
        )

    def _validate_url(self, product_url: str) -> bool:
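        """Check that the product page exists before kicking off the crew.

        Issues a browser-like GET request and treats the Next.js NEXT_NOT_FOUND
        marker in the response body (or any request error) as an invalid page.
        """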
        headers = {
            "accept": "*/*",
            "accept-language": "pt-BR,pt;q=0.9,en-US;q=0.8,en;q=0.7",
            "sec-ch-ua": '"Not)A;Brand";v="8", "Chromium";v="138", "Google Chrome";v="138"',
            "sec-ch-ua-mobile": "?0",
            "sec-ch-ua-platform": '"Windows"',
            "sec-fetch-dest": "empty",
            "sec-fetch-mode": "cors",
            "sec-fetch-site": "cross-site",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36",
        }
        try:
            response = requests.get(product_url, headers=headers, timeout=30)
            response.raise_for_status()
            if '<template data-dgst="NEXT_NOT_FOUND">' in response.text:
                return False
            return True
        except requests.exceptions.RequestException as e:
            print(f"Error checking URL: {e}")
            return False

    def _prepare_merchant(self, product_url: str):
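        """Resolve the merchant for this URL and return (merchant, css_selector, short_url)."""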
        merchant = self.merchant_selector_tool.run(product_url)
        css_selector = merchant.get_css_selector()
        short_url = merchant.shorten_url(product_url)
        return merchant, css_selector, short_url

    def _create_analyze_product_task(self, product_url: str, css_selector: str, main_cupom_discount_percentage: float, short_url: str, original_price: float, discounted_price: float) -> Task:
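        """Build the analysis task: scrape the page, use the user-supplied prices when given
        (otherwise extract them), and apply the coupon discount via the calculator tools."""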
        task_description = (f"1. Scrape the content of the URL: {product_url} using the 'scrape_tool' with css_element = '{css_selector}'.\n"
                            "2. Extract the product name, key characteristics, and any other relevant DISCOUNT available.\n")

        if original_price is not None and original_price > 0 and discounted_price is not None and discounted_price > 0:
            task_description += (f"3. The user has provided the prices. Use ORIGINAL PRICE = {original_price} and DISCOUNTED PRICE = {discounted_price}.\n")
            final_best_price_source = str(discounted_price)
        else:
            task_description += ("3. Identify and extract the original product price and the final discounted price if existing from the scraped content. "
                                 "IGNORE any price breakdowns like 'produto' or 'consultoria'.\n")
            final_best_price_source = "the extracted final best price"

        task_description += (f"4. Use the 'Calculate Discounted Price Tool' with {final_best_price_source} and the provided DISCOUNT PERCENTAGE ({main_cupom_discount_percentage}) to get the CUPOM DISCOUNTED PRICE.\n"
                             "4.1 Use the 'Calculate Discount Value Tool' with ORIGINAL PRICE and CUPOM DISCOUNTED PRICE to get the TOTAL DISCOUNT PERCENTAGE.\n" 
                             f"5. Provide all this information, including the product name, ORIGINAL PRICE, DISCOUNTED PRICE (the one from step 3), CUPOM DISCOUNTED PRICE, and the generated short URL ({short_url}). If any of this information cannot be extracted, you MUST return 'MISSING_PRODUCT_INFO'.")

        return Task(
            description=task_description,
            agent=self.product_analyst,
            expected_output="A concise summary of the product including its name, key features, unique selling points, ORIGINAL PRICE, DISCOUNTED PRICE (the one used as the input in the tool 'Calculate Discounted Price Tool'), CUPOM DISCOUNTED PRICE, TOTAL DISCOUNT PERCENTAGE, and the SHORT SHAREABLE URL ({short_url}), OR 'MISSING_PRODUCT_INFO' if essential product details are not found."
        )

    def _create_post_task(self, analyze_product_task: Task, merchant, main_cupom: str, cupom_1: str, store_name: str) -> Task:
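        """Build the copywriting task that renders the merchant's post template with the analysis output."""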
        template = merchant.get_template(main_cupom, cupom_1, store=store_name)
        return Task(
            description=(f"Based on the product analysis, create a CONCISE and DIRECT social media post in Portuguese, suitable for a WhatsApp group. \n If the input you receive is 'INVALID_URL' or 'MISSING_PRODUCT_INFO', you MUST stop and output only that same message.\n The post should strictly follow this template:\n {template}\n\nEnsure a URL is always present in the output. Include a clear call to action and a MAXIMUM of 2 relevant emojis. DO NOT include hashtags. Keep it short and impactful and does not forget to include the backticks around the last paragraph.\n\n If the input you receive is 'INVALID_URL', you MUST stop and output only 'INVALID_URL'."),
            agent=self.social_media_copywriter,
            expected_output="A short, direct, and impactful social media post in Portuguese for WhatsApp, strictly following the provided template, including the FINAL PRICE, any DISCOUNT, the SHORT SHAREABLE URL, a call to action, and up to 2 emojis, one in the Title and another in the Description. No hashtags should be present. A URL must always be present in the final output, OR the message 'INVALID_URL' or 'MISSING_PRODUCT_INFO' if the page was not found or product info is missing.",
            context=[analyze_product_task]
        )

    def run_crew(self, product_url: str, store_name: str, main_cupom: str, main_cupom_discount_percentage: float, cupom_1: str, original_price: float, discounted_price: float) -> str:
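        """Validate the URL, then run the analysis and copywriting tasks sequentially.

        Returns the crew output, or the plain string 'INVALID_URL' when the page cannot be reached.
        """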
        if not self._validate_url(product_url):
            return "INVALID_URL"

        merchant, css_selector, short_url = self._prepare_merchant(product_url)
        
        analyze_product_task = self._create_analyze_product_task(product_url, css_selector, main_cupom_discount_percentage, short_url, original_price, discounted_price)
        create_post_task = self._create_post_task(analyze_product_task, merchant, main_cupom, cupom_1, store_name)

        crew = Crew(
            agents=[self.product_analyst, self.social_media_copywriter],
            tasks=[analyze_product_task, create_post_task],
            process=Process.sequential
        )

        print(f"Crew is kicking off for URL: {product_url}")
        result = crew.kickoff()
        return result

def clean_env_vars():
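    """Drop the OpenAI/Natura environment variables so only the values typed into the UI are used."""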
    os.environ.pop("OPENAI_API_KEY", None)
    os.environ.pop("NATURA_API_TOKEN", None)
    os.environ.pop("OPENAI_BASE_URL", None)
    os.environ.pop("OPENAI_MODEL_NAME", None)

# --- Gradio Interface ---
def generate_ad(product_url: str, store_name: str, main_cupom: str, main_cupom_discount_percentage: float, cupom_1: str, original_price: float, discounted_price: float, openai_api_key: str, natura_api_token: str, openai_base_url: str, openai_model_name: str):
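    """Gradio handler implemented as a generator: the first yield disables the button and shows a
    progress message, the final yield re-enables it and shows the generated ad or an error."""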
    yield gr.update(interactive=False, value="Generating..."), gr.Markdown(value="⏳ Generating ad... Please wait.")

    if not openai_api_key or not natura_api_token or not openai_model_name or not openai_base_url:
        yield gr.update(interactive=True, value="Generate Ad"), gr.Markdown(value="Please configure your API keys in the Settings tab.")
        return

    original_price = original_price if original_price is not None else 0
    discounted_price = discounted_price if discounted_price is not None else 0

    social_media_crew = SocialMediaCrew(openai_api_key, natura_api_token, openai_base_url, openai_model_name)
    result = social_media_crew.run_crew(product_url, store_name, main_cupom, main_cupom_discount_percentage, cupom_1, original_price, discounted_price)

    if result == "INVALID_URL":
        yield gr.update(interactive=True, value="Generate Ad"), gr.Markdown(value="❌ The provided URL is invalid or the product page could not be found.")
    elif result == "MISSING_PRODUCT_INFO":
        yield gr.update(interactive=True, value="Generate Ad"), gr.Markdown(value="⚠️ Could not extract all required product information from the URL. Please check the URL or try a different one.")
    else:
        yield gr.update(interactive=True, value="Generate Ad"), gr.Markdown(value=result.raw)

with gr.Blocks() as demo:
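    # UI layout: three tabs ("Generate Ad", "Fragrantica", "Settings").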
    gr.Markdown("# 🚀 Social Media Ad Generator")
    gr.Markdown("Enter a product URL to generate a social media ad.")

    with gr.Tab("Generate Ad"):
        url_input = gr.Textbox(label="Product URL", placeholder="Enter product URL here...")
        store_name_input = gr.Textbox(label="Store Name (e.g., O Boticário)", placeholder="Enter store name...")
        
        main_cupom_input = gr.Textbox(label="Main Cupom (e.g., PRIMEIRACOMPRA)", value="PRIMEIRACOMPRA")
        main_cupom_discount_percentage_input = gr.Number(label="Main Cupom Discount Percentage (e.g., 20 for 20%)", value=15, minimum=0, maximum=100)
        cupom_1_input = gr.Textbox(label="Cupom 1 (e.g., AMIGO15)", placeholder="Enter first coupon code...")
        original_price_input = gr.Number(label="Original Price (Optional)", value=0, minimum=0)
        discounted_price_input = gr.Number(label="Discounted Price (Optional)", value=0, minimum=0)
        with gr.Row():
            generate_button = gr.Button("Generate Ad")
            clear_button = gr.Button("Clear")
        ad_output = gr.Markdown(label="Your Generated Ad", show_copy_button=True)
        
    with gr.Tab("Fragrantica"):
        gr.Markdown("### 👃 Fragrantica Website Analyzer")
        fragrantica_url_input = gr.Textbox(label="Fragrantica Product URL", placeholder="Enter Fragrantica product URL here...")
        analyze_fragrantica_button = gr.Button("Analyze Fragrantica Product")
        fragrantica_output = gr.Markdown(label="Fragrantica Analysis Report")

    with gr.Tab("Settings"):
        gr.Markdown("### ⚙️ API Key Settings")
        gr.Markdown("Enter your API keys below. These will be used for the current session.")
        openai_key_input = gr.Textbox(label="OPENAI_API_KEY", type="password", value=os.getenv("OPENAI_API_KEY", ""))
        natura_token_input = gr.Textbox(label="NATURA_API_TOKEN", type="password", value=os.getenv("NATURA_API_TOKEN", ""))
        openai_base_url_input = gr.Textbox(label="OPENAI_BASE_URL", value=os.getenv("OPENAI_BASE_URL", "https://api.openai.com/v1"))
        openai_model_name_input = gr.Textbox(label="OPENAI_MODEL_NAME", value=os.getenv("OPENAI_MODEL_NAME", "gpt-4.1"))
        
        # The environment values were already copied into the textboxes above; pop them so the
        # underlying libraries only pick up the keys passed in from these fields.
        clean_env_vars()
        # No save button needed as keys are passed directly.
        gr.Markdown("API keys are used directly from these fields when you click 'Generate Ad'. They are not saved persistently.")

    def clear_fields():
        return "", 0, 0

    generate_button.click(generate_ad, inputs=[url_input, store_name_input, main_cupom_input, main_cupom_discount_percentage_input, cupom_1_input, original_price_input, discounted_price_input, openai_key_input, natura_token_input, openai_base_url_input, openai_model_name_input], outputs=[generate_button, ad_output])
    clear_button.click(clear_fields, inputs=[], outputs=[url_input, original_price_input, discounted_price_input])

    # Fragrantica analysis handler; the crew itself lives in fragrantica_crew.py.
    def analyze_fragrantica_url(url, openai_api_key, natura_api_token, openai_base_url, openai_model_name):
        if not openai_api_key or not openai_model_name or not openai_base_url:
            return "Please configure your API keys in the Settings tab."
        from fragrantica_crew import FragranticaCrew
        fragrantica_crew = FragranticaCrew(openai_api_key, openai_base_url, openai_model_name)
        report = fragrantica_crew.kickoff(url=url)
        if report == "SCRAPING_FAILED":
            return "❌ Scraping failed. The website could not be accessed or parsed. Please check the URL or try again later."
        return report.raw

    analyze_fragrantica_button.click(analyze_fragrantica_url, inputs=[fragrantica_url_input, openai_key_input, natura_token_input, openai_base_url_input, openai_model_name_input], outputs=fragrantica_output)

if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", server_port=7860)
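
# Headless usage sketch (comments only, not executed): the crew can also be driven without the
# Gradio UI. All values below are placeholders, and the URL must point to a product page of a
# supported merchant. Note that clean_env_vars() pops the API environment variables once the
# Blocks UI above is built, so pass credentials explicitly when scripting against this module.
#
#   crew = SocialMediaCrew(
#       openai_api_key="sk-...",          # placeholder
#       natura_api_token="natura-token",  # placeholder
#       openai_base_url="https://api.openai.com/v1",
#       openai_model_name="gpt-4.1",
#   )
#   result = crew.run_crew(
#       product_url="https://example.com/produto",  # placeholder
#       store_name="O Boticário",
#       main_cupom="PRIMEIRACOMPRA",
#       main_cupom_discount_percentage=15,
#       cupom_1="AMIGO15",
#       original_price=0,
#       discounted_price=0,
#   )
#   print(result if isinstance(result, str) else result.raw)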