import os

import streamlit as st
from PyPDF2 import PdfReader
from dotenv import load_dotenv
import google.generativeai as genai
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.chains.question_answering import load_qa_chain
from langchain.prompts import PromptTemplate
from langchain_community.vectorstores import FAISS
from langchain_google_genai import GoogleGenerativeAIEmbeddings, ChatGoogleGenerativeAI

# Load environment variables from the local .env file
load_dotenv()

# Configure the Google Generative AI API key
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
if not GOOGLE_API_KEY:
    raise ValueError("GOOGLE_API_KEY not found in environment variables. Please check your .env file.")
genai.configure(api_key=GOOGLE_API_KEY)


# Helper Functions
def get_pdf_text(pdf_docs):
    """Extract and concatenate the text of every page in the given PDFs."""
    text = ""
    for pdf in pdf_docs:
        pdf_reader = PdfReader(pdf)
        for page in pdf_reader.pages:
            # extract_text() can return None for pages with no extractable text
            text += page.extract_text() or ""
    return text


def get_text_chunks(text):
    text_splitter = RecursiveCharacterTextSplitter(chunk_size=10000, chunk_overlap=1000)
    return text_splitter.split_text(text)
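
# Note: RecursiveCharacterTextSplitter measures chunk_size and chunk_overlap in
# characters by default (length_function=len), so each chunk is roughly 10,000
# characters with a 1,000-character overlap to preserve context across boundaries.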


def get_vector_store(text_chunks):
    embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001", google_api_key=GOOGLE_API_KEY)
    vector_store = FAISS.from_texts(text_chunks, embedding=embeddings)
    vector_store.save_local("faiss_index")
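    # save_local writes the index into the faiss_index/ directory (index.faiss plus
    # a pickled docstore), which is what the existence checks below rely on.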


def get_conversational_chain():
    prompt_template = """
    {context}

    You are an AI Agri Farmer Advisor. Your role is to:
    1. Answer questions about farming, crop management, soil health, fertilizers, pest control, and more.
    2. Provide detailed instructions, tips, and advice based on the question.
    3. If information is not found in the document, answer based on your knowledge.

    ## Task:
    {question}

    Provide a detailed and helpful response.
    """
    model = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.3)
    prompt = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
    chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
    return chain
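
# Note: with chain_type="stuff", load_qa_chain concatenates the retrieved chunks
# into the {context} variable of the prompt above and makes a single model call.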


def user_input(user_question):
    embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001", google_api_key=GOOGLE_API_KEY)
    faiss_index_path = "faiss_index/index.faiss"
    if not os.path.exists(faiss_index_path):
        return {"error": "FAISS index not found. Please ensure the documents are processed first."}
    # allow_dangerous_deserialization is needed because the docstore is pickled;
    # only indexes created by this app itself are loaded here.
    new_db = FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)
    docs = new_db.similarity_search(user_question)
    chain = get_conversational_chain()
    response = chain({"input_documents": docs, "question": user_question}, return_only_outputs=True)
    return {"response": response["output_text"]}


# Streamlit UI
def main():
    st.set_page_config(page_title="🌾 AI Agri Farmer Chat Bot 🌿", page_icon="🌾")
    st.header("🌾 AI Agri Farmer Chat Bot 🌱")
    st.markdown("""
    Welcome to the AI Agri Farmer Chat Bot! 🤖 Ask any farming-related questions and get advice on crops,
    soil, fertilizers, pest management, and more. If your question is not covered in the document,
    the bot will answer using its own knowledge base.
    """)

    # Input field for the user's question
    user_query = st.text_input("💬 Ask your question about farming or agriculture:")
    st.caption("Example: 'What is the best fertilizer for cotton farming?'")

    if user_query:
        response = user_input(user_query)
        if "error" in response:
            st.error(response["error"])
        else:
            st.write("🤖 Reply: ", response["response"])

    # Sidebar for PDF processing
    with st.sidebar:
        st.title("Document Information")
        st.info("The chatbot uses information from `ai-farming.pdf` to answer your questions.")
        if not os.path.exists("faiss_index/index.faiss"):
            with st.spinner("Processing the farming document..."):
                pdf_files = ["ai-farming.pdf"]
                # PdfReader accepts file paths directly, so the PDFs do not need
                # to be opened (and left unclosed) here.
                raw_text = get_pdf_text(pdf_files)
                text_chunks = get_text_chunks(raw_text)
                get_vector_store(text_chunks)
                st.success("Document processed successfully!")
        else:
            st.info("The document index is already loaded.")


if __name__ == "__main__":
    main()
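
# To run locally (a sketch; the file name "app.py" and the exact package set are
# assumptions, not specified by the source):
#   pip install streamlit PyPDF2 python-dotenv google-generativeai \
#       langchain langchain-community langchain-google-genai faiss-cpu
#   streamlit run app.py
# GOOGLE_API_KEY must be set in the environment or in a .env file, and
# ai-farming.pdf must be in the working directory so the sidebar can build the index.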