import gradio as gr
from rag_engine import RAGEngine
import torch
import os
import logging
import traceback
import asyncio

# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)


def safe_search(query, max_results):
    """Wrapper function to handle errors gracefully"""
    try:
        rag = RAGEngine()
        results = asyncio.run(rag.search_and_process(query, max_results))
        return format_results(results)
    except Exception as e:
        error_msg = f"An error occurred: {str(e)}\n\nTraceback:\n{traceback.format_exc()}"
        logger.error(error_msg)
        return f"# ❌ Error\nSorry, an error occurred while processing your search:\n```\n{str(e)}\n```"


def format_results(results):
    """Format search results for display"""
    if not results:
        return "# ⚠️ No Results\nNo search results were found. Please try a different query."

    formatted = "# 🔍 Search Results\n\n"

    # Add insights section
    if 'insights' in results:
        formatted += f"## 💡 Key Insights\n{results['insights']}\n\n"

    # Add follow-up questions
    if 'follow_up_questions' in results:
        formatted += "## ❓ Follow-up Questions\n"
        for q in results['follow_up_questions']:
            if q and q.strip():
                formatted += f"- {q.strip()}\n"
        formatted += "\n"

    # Add main results
    if 'results' in results:
        formatted += "## 📄 Detailed Results\n\n"
        for i, result in enumerate(results['results'], 1):
            formatted += f"### {i}. "
            if 'url' in result:
                formatted += f"[{result.get('title', 'Untitled')}]({result['url']})\n"
            else:
                formatted += f"{result.get('title', 'Untitled')}\n"

            if result.get('processed_content'):
                content = result['processed_content']
                if 'summary' in content:
                    formatted += f"**Summary:** {content['summary']}\n\n"
                if content.get('metadata', {}).get('description'):
                    formatted += f"**Description:** {content['metadata']['description']}\n\n"
                if content.get('content_type') == 'code':
                    formatted += f"**Code Analysis:** {content.get('explanation', '')}\n\n"
                else:
                    formatted += f"**Detailed Explanation:** {content.get('explanation', '')}\n\n"

            if 'snippet' in result:
                formatted += f"**Snippet:** {result['snippet']}\n\n"

            formatted += "---\n\n"

    # Add similar queries if available
    if results.get('similar_queries'):
        formatted += "## 🔄 Related Searches\n"
        for query in results['similar_queries']:
            if isinstance(query, dict) and 'query' in query:
                formatted += f"- {query['query']}\n"
            elif isinstance(query, str):
                formatted += f"- {query}\n"

    return formatted


def create_demo():
    """Create the Gradio interface"""
    # Create cache directory
    os.makedirs(".cache", exist_ok=True)

    demo = gr.Blocks(
        title="AI-Powered Search Engine",
        css="""
        .gradio-container {max-width: 1200px !important}
        .markdown-text {font-size: 16px !important}
        """
    )

    with demo:
        gr.Markdown("""
        # 🔍 Intelligent Web Search Engine

        This advanced search engine uses AI to provide deep understanding of search results:

        - 🧠 Multi-model AI analysis
        - 📊 Semantic search and caching
        - 💡 Automatic insights generation
        - ❓ Smart follow-up questions
        - 🔄 Related searches
        """)

        with gr.Row():
            with gr.Column():
                query = gr.Textbox(
                    label="Search Query",
                    placeholder="Enter your search query...",
                    lines=2
                )
                max_results = gr.Slider(
                    minimum=3,
                    maximum=10,
                    value=5,
                    step=1,
                    label="Maximum Results"
                )
                search_btn = gr.Button("🔍 Search", variant="primary")

            with gr.Column():
                output = gr.Markdown(
                    label="Results",
                    show_label=False
                )

        search_btn.click(
            fn=safe_search,
            inputs=[query, max_results],
            outputs=output
        )

        gr.Examples(
            examples=[
                ["What are the latest developments in quantum computing?", 5],
                ["How does Python's asyncio work? Show code examples", 5],
                ["Explain the transformer architecture in deep learning", 5],
                ["What are the environmental impacts of renewable energy?", 5]
            ],
            inputs=[query, max_results],
            outputs=output,
            fn=safe_search,
            cache_examples=True
        )

    return demo


# Create the demo
demo = create_demo()

# Launch for Spaces
demo.launch()
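
# Optional local smoke test (a sketch, assuming rag_engine and its model
# dependencies are configured on this machine; the query below is illustrative).
# Kept commented out so it never runs on Spaces, where demo.launch() above
# serves the app.
# if __name__ == "__main__":
#     print(safe_search("What is retrieval-augmented generation?", 3))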