Krish30 commited on
Commit
785c4f1
·
verified ·
1 Parent(s): 7ab30e2

Upload 5 files

Browse files
Files changed (5) hide show
  1. app.py +162 -0
  2. chat_history.db +0 -0
  3. config.json +1 -0
  4. requirements.txt +12 -0
  5. vectorize_documents.py +86 -0
app.py ADDED
@@ -0,0 +1,162 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import json
3
+ import sqlite3
4
+ from datetime import datetime
5
+ import streamlit as st
6
+ from langchain_huggingface import HuggingFaceEmbeddings
7
+ from langchain_chroma import Chroma
8
+ from langchain_groq import ChatGroq
9
+ from langchain.memory import ConversationBufferMemory
10
+ from langchain.chains import ConversationalRetrievalChain
11
+
# Directory paths and configurations.
working_dir = os.path.dirname(os.path.abspath(__file__))
# Load the Groq API key from config.json and export it via the environment,
# since langchain-groq reads GROQ_API_KEY from os.environ.
# Use a context manager so the file handle is closed (the original
# json.load(open(...)) left it dangling).
with open(os.path.join(working_dir, "config.json")) as config_file:
    config_data = json.load(config_file)
GROQ_API_KEY = config_data["GROQ_API_KEY"]
os.environ["GROQ_API_KEY"] = GROQ_API_KEY
# Database setup
def setup_db():
    """Open (or create) the SQLite chat-history database and make sure the
    chat_histories table exists. Returns the open connection."""
    # check_same_thread=False: Streamlit may touch this connection from
    # different script reruns/threads.
    connection = sqlite3.connect("chat_history.db", check_same_thread=False)
    create_table_sql = """
        CREATE TABLE IF NOT EXISTS chat_histories (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            username TEXT,
            timestamp TEXT,
            day TEXT,
            user_input TEXT,
            assistant_response TEXT
        )
    """
    # Connection.execute creates and uses a cursor implicitly.
    connection.execute(create_table_sql)
    connection.commit()
    return connection
# Save chat history to SQLite
def save_chat_history(conn, username, timestamp, day, user_input, assistant_response):
    """Persist one user/assistant exchange as a row in chat_histories."""
    insert_sql = (
        "INSERT INTO chat_histories "
        "(username, timestamp, day, user_input, assistant_response) "
        "VALUES (?, ?, ?, ?, ?)"
    )
    row = (username, timestamp, day, user_input, assistant_response)
    conn.execute(insert_sql, row)
    conn.commit()
# Vectorstore setup
def setup_vectorstore():
    """Open the persisted Chroma vector store ("soil_vectordb") using the
    default HuggingFace embedding model."""
    return Chroma(
        persist_directory="soil_vectordb",
        embedding_function=HuggingFaceEmbeddings(),
    )
# Chatbot chain setup
def chat_chain(vectorstore):
    """Build a ConversationalRetrievalChain that answers from *vectorstore*
    using Groq's Llama 3.1 70B model with buffered chat memory.

    Returns the chain; responses carry the answer under the "answer" key and
    include the retrieved source documents.
    """
    groq_llm = ChatGroq(model="llama-3.1-70b-versatile", temperature=0)
    buffer_memory = ConversationBufferMemory(
        llm=groq_llm,
        output_key="answer",
        memory_key="chat_history",
        return_messages=True,
    )
    return ConversationalRetrievalChain.from_llm(
        llm=groq_llm,
        retriever=vectorstore.as_retriever(),
        chain_type="stuff",
        memory=buffer_memory,
        verbose=True,
        return_source_documents=True,
    )
# Streamlit setup
st.set_page_config(page_title="Soil.Ai", page_icon="🌱", layout="centered")
st.title("🌱 Soil.Ai - Smart Farming Recommendations")
st.subheader("AI-driven solutions for modern farming!")

# Initialize database and session state
if "conn" not in st.session_state:
    st.session_state.conn = setup_db()

# First visit: ask for a name before building the (expensive) chain.
if "username" not in st.session_state:
    username = st.text_input("Enter your name to proceed:")
    if username:
        with st.spinner("Loading AI interface..."):
            st.session_state.username = username
            st.session_state.chat_history = []
            st.session_state.vectorstore = setup_vectorstore()
            st.session_state.conversational_chain = chat_chain(st.session_state.vectorstore)
            st.success(f"Welcome, {username}! Start by choosing an option.")
else:
    username = st.session_state.username

# Main interface: make sure the chain exists even if the username flow was skipped.
if "conversational_chain" not in st.session_state:
    st.session_state.vectorstore = setup_vectorstore()
    st.session_state.conversational_chain = chat_chain(st.session_state.vectorstore)

if "username" in st.session_state:
    st.subheader(f"Hello {username}, choose your option below:")

    # Option selection: ask a general question or input sensor data.
    option = st.radio(
        "Choose an action:",
        ("Ask a general agriculture-related question", "Input sensor data for recommendations")
    )

    # Option 1: Ask AI any agriculture-related question.
    if option == "Ask a general agriculture-related question":
        user_query = st.chat_input("Ask AI anything about agriculture...")
        if user_query:
            with st.spinner("Processing your query..."):
                # Display user's query.
                with st.chat_message("user"):
                    st.markdown(user_query)

                # Get assistant's response.
                with st.chat_message("assistant"):
                    response = st.session_state.conversational_chain({"question": user_query})
                    assistant_response = response["answer"]
                    st.markdown(assistant_response)

                # Keep the exchange in session state.
                st.session_state.chat_history.append({"role": "user", "content": user_query})
                st.session_state.chat_history.append({"role": "assistant", "content": assistant_response})

                # Persist the exchange to SQLite.
                timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
                day = datetime.now().strftime("%A")
                save_chat_history(st.session_state.conn, username, timestamp, day, user_query, assistant_response)

    # Option 2: Input sensor data for recommendations.
    elif option == "Input sensor data for recommendations":
        st.markdown("### Enter soil and environmental parameters:")
        ph = st.number_input("Enter Soil pH", min_value=0.0, max_value=14.0, step=0.1)
        moisture = st.number_input("Enter Soil Moisture (%)", min_value=0.0, max_value=100.0, step=0.1)
        temperature = st.number_input("Enter Temperature (°C)", min_value=-50.0, max_value=60.0, step=0.1)
        air_quality = st.number_input("Enter Air Quality Index (AQI)", min_value=0, max_value=500, step=1)

        if st.button("Get Recommendations"):
            # BUG FIX: the original used numeric truthiness
            # (`if ph and moisture and ...`), which rejected legitimate zero
            # values such as pH 0.0, 0% moisture, or AQI 0. st.number_input
            # always returns a number, so check for None explicitly instead.
            if all(v is not None for v in (ph, moisture, temperature, air_quality)):
                with st.spinner("Analyzing data..."):
                    # Prepare input query.
                    user_input = f"Recommendations for:\n- pH: {ph}\n- Moisture: {moisture}%\n- Temperature: {temperature}°C\n- Air Quality: {air_quality}"

                    # Display user's input.
                    with st.chat_message("user"):
                        st.markdown(user_input)

                    # Get assistant's response.
                    with st.chat_message("assistant"):
                        response = st.session_state.conversational_chain({"question": user_input})
                        assistant_response = response["answer"]
                        st.markdown(assistant_response)

                    # Keep the exchange in session state.
                    st.session_state.chat_history.append({"role": "user", "content": user_input})
                    st.session_state.chat_history.append({"role": "assistant", "content": assistant_response})

                    # Persist the exchange to SQLite.
                    timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
                    day = datetime.now().strftime("%A")
                    save_chat_history(st.session_state.conn, username, timestamp, day, user_input, assistant_response)
            else:
                st.error("Please fill in all the fields!")
chat_history.db ADDED
Binary file (12.3 kB). View file
 
config.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"GROQ_API_KEY": "REPLACE_WITH_YOUR_GROQ_API_KEY"}
requirements.txt ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ streamlit==1.38.0
2
+ langchain-community==0.2.16
3
+ langchain-text-splitters==0.2.4
4
+ langchain-chroma==0.1.3
5
+ langchain-huggingface==0.0.3
6
+ langchain-groq==0.1.9
7
+ unstructured==0.15.0
8
+ unstructured[pdf]==0.15.0
9
+ nltk==3.8.1
10
+ psycopg2-binary
11
+ pgvector
12
+ langchain_postgres
vectorize_documents.py ADDED
@@ -0,0 +1,86 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from langchain_community.document_loaders import UnstructuredFileLoader
2
+ from langchain_community.document_loaders import DirectoryLoader
3
+ from langchain_text_splitters import CharacterTextSplitter
4
+ from langchain_huggingface import HuggingFaceEmbeddings
5
+ from langchain_chroma import Chroma
6
+
7
+
# Define a function to perform vectorization
def vectorize_documents():
    """Load every PDF under Data/, split it into chunks, embed the chunks,
    and persist them into the "soil_vectordb" Chroma store."""
    hf_embeddings = HuggingFaceEmbeddings()

    pdf_loader = DirectoryLoader(
        path="Data",
        glob="./*.pdf",
        loader_cls=UnstructuredFileLoader,
    )
    docs = pdf_loader.load()

    # Split into overlapping chunks so retrieval keeps local context.
    splitter = CharacterTextSplitter(chunk_size=2000, chunk_overlap=500)
    chunks = splitter.split_documents(docs)

    # Store in the persistent Chroma vector DB.
    Chroma.from_documents(
        documents=chunks,
        embedding=hf_embeddings,
        persist_directory="soil_vectordb",
    )

    print("Documents Vectorized and saved in VectorDB")


# Expose embeddings if needed.
# NOTE(review): instantiating HuggingFaceEmbeddings at module level runs on
# import and presumably loads/downloads the embedding model as a side effect —
# kept only for backward compatibility with any importer that references
# `vectorize_documents.embeddings`; confirm whether it is actually used.
embeddings = HuggingFaceEmbeddings()


# Main guard to prevent execution on import.
# (The ~40-line commented-out duplicate of vectorize_documents() that
# followed here was dead code and has been removed.)
if __name__ == "__main__":
    vectorize_documents()