import os

import gradio as gr
from langchain.document_loaders import CSVLoader
from langchain.vectorstores import FAISS
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.chains import RetrievalQA
from langchain_groq import ChatGroq

# Groq API key must be set in the environment before launching the app
api_key = os.environ.get("GROQ_API_KEY")
if not api_key:
    raise ValueError("GROQ_API_KEY environment variable not found")

# Embedding model used to index the CSV rows
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")

# Groq-hosted LLM used to answer questions over the retrieved rows
llm = ChatGroq(
    model="mixtral-8x7b-32768",
    temperature=0,
    max_tokens=None,
    timeout=None,
    max_retries=2,
)


# function to process query and CSV
def process_query(file, query):
    try:
        # Load each CSV row as a document (file.name is the path of the uploaded temp file)
        loader = CSVLoader(file_path=file.name)
        documents = loader.load()

        # FAISS vector store
        vector_store = FAISS.from_documents(documents, embeddings)
        retriever = vector_store.as_retriever()

        # RetrievalQA pipeline
        qa_chain = RetrievalQA.from_chain_type(
            llm=llm,
            retriever=retriever,
            return_source_documents=True,
        )

        # Get the response
        response = qa_chain({"query": query})
        result = response["result"]
        sources = "\n".join(doc.page_content for doc in response["source_documents"])
        return result, sources
    except Exception as e:
        return f"An error occurred: {str(e)}", ""


# Gradio interface
interface = gr.Interface(
    fn=process_query,
    inputs=[
        gr.File(label="Upload CSV File"),
        gr.Textbox(label="Enter your query"),
    ],
    outputs=[
        gr.Textbox(label="Answer"),
        gr.Textbox(label="Source Documents"),
    ],
    title="DataScope.ai",
    description="Upload & Unlock Insights from Your Data – Ask, Query, Discover!",
)

interface.launch(share=True)