import os

import gradio as gr
from langchain.chains import RetrievalQA
from langchain_groq import ChatGroq
from langchain_community.vectorstores import FAISS
from langchain_community.embeddings import HuggingFaceEmbeddings
# Load the embedding model and the prebuilt FAISS index
embeddings = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")
vector_store = FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)
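# Note: the "faiss_index" directory is assumed to have been built offline from the
# course catalogue. A minimal sketch of that build step (the `course_docs` name is
# hypothetical) might look like:
#   course_docs = [...]  # LangChain Document objects, one per course
#   FAISS.from_documents(course_docs, embeddings).save_local("faiss_index")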
# Initialize the Groq-hosted LLM
llm = ChatGroq(
    model_name="llama-3.3-70b-versatile",
    temperature=1,
    groq_api_key=os.environ.get("GROQ_API_KEY")  # read the key from the environment; never hardcode secrets
)
# Create the RAG chain: retrieve relevant chunks and "stuff" them into the prompt
qa_chain = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="stuff",
    retriever=vector_store.as_retriever()
)
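# The default retriever returns the top-k most similar chunks; if answers miss
# relevant courses, k can be tuned, e.g. (illustrative value):
#   retriever=vector_store.as_retriever(search_kwargs={"k": 4})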
# Define the query function used by the Gradio UI
def search_courses(query):
    result = qa_chain.invoke({"query": query})
    return result["result"]
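# Example call (query text is illustrative), e.g. from a Python shell:
#   search_courses("Are there any free courses on machine learning?")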
# Gradio interface
iface = gr.Interface(
    fn=search_courses,
    inputs=gr.Textbox(label="Enter your query"),
    outputs=gr.Textbox(label="Response"),
    title="Smart Course Search Tool",
    description="Ask questions to find the best free courses on Analytics Vidhya."
)
# Launch the app
iface.launch()