# New code, in which sensor data is taken directly from ThingSpeak.
import os
import json
import requests
from datetime import datetime
import streamlit as st
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_chroma import Chroma
from langchain_groq import ChatGroq
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationalRetrievalChain
from deep_translator import GoogleTranslator
# Directory paths and configurations
working_dir = os.path.dirname(os.path.abspath(__file__))
with open(f"{working_dir}/config.json") as config_file:
    config_data = json.load(config_file)
GROQ_API_KEY = config_data["GROQ_API_KEY"]
os.environ["GROQ_API_KEY"] = GROQ_API_KEY
# ThingSpeak API details (fetches the latest two feed entries from the sensor channel)
THINGSPEAK_API_URL = "https://api.thingspeak.com/channels/2485113/feeds.json?results=2"
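# The feeds endpoint returns JSON shaped roughly like
#   {"channel": {...}, "feeds": [{"created_at": "...", "field1": "6.8", ...}, ...]}
# field1-field4 are assumed to be configured on this channel as soil pH, soil moisture,
# temperature, and air quality, in that order; adjust the mapping in fetch_sensor_data()
# if the channel uses a different field layout.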
# Vectorstore setup
def setup_vectorstore():
    embeddings = HuggingFaceEmbeddings()
    vectorstore = Chroma(persist_directory="soil_vectordb", embedding_function=embeddings)
    return vectorstore
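# Note: HuggingFaceEmbeddings() loads its default sentence-transformers model; the
# persisted "soil_vectordb" is assumed to have been built with the same embedding model,
# otherwise similarity search against it will return poor matches.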
# Chatbot chain setup
def chat_chain(vectorstore):
    llm = ChatGroq(model="llama-3.1-70b-versatile", temperature=0)
    retriever = vectorstore.as_retriever()
    memory = ConversationBufferMemory(
        llm=llm,
        output_key="answer",
        memory_key="chat_history",
        return_messages=True
    )
    chain = ConversationalRetrievalChain.from_llm(
        llm=llm,
        retriever=retriever,
        chain_type="stuff",
        memory=memory,
        verbose=True,
        return_source_documents=True
    )
    return chain
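# ConversationalRetrievalChain condenses each new question together with the stored
# chat_history, retrieves matching chunks from the vectorstore, and "stuffs" them into a
# single prompt for the Groq-hosted LLM. Because return_source_documents=True, the chain
# returns the retrieved chunks under "source_documents" alongside the "answer" key.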
# Fetch sensor data from the ThingSpeak API
def fetch_sensor_data():
    try:
        response = requests.get(THINGSPEAK_API_URL, timeout=10)
        if response.status_code == 200:
            data = response.json()
            feeds = data.get("feeds", [])
            if feeds:
                latest_feed = feeds[-1]  # Most recent feed entry
                return {
                    "pH": float(latest_feed.get("field1") or 0),
                    "moisture": float(latest_feed.get("field2") or 0),
                    "temperature": float(latest_feed.get("field3") or 0),
                    "air_quality": float(latest_feed.get("field4") or 0),
                }
            st.error("The ThingSpeak channel returned no feed entries.")
            return None
        else:
            st.error("Failed to fetch data from the ThingSpeak API. Please check the API URL or connectivity.")
            return None
    except Exception as e:
        st.error(f"An error occurred while fetching sensor data: {e}")
        return None
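# Illustrative return value of fetch_sensor_data() (values are hypothetical):
#   {"pH": 6.8, "moisture": 42.0, "temperature": 27.5, "air_quality": 118.0}
# On any failure the function reports the error in the UI and returns None, so callers
# should check for None before using the readings.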
# Updated Streamlit setup with language selection dropdown
st.set_page_config(page_title="Soil.Ai", page_icon="🌱", layout="centered")
st.title("🌱 Soil.Ai - Smart Farming Recommendations")
st.subheader("AI-driven solutions for modern farming!")

# Initialize session state
if "username" not in st.session_state:
    username = st.text_input("Enter your name to proceed:")
    if username:
        with st.spinner("Loading AI interface..."):
            st.session_state.username = username
            st.session_state.vectorstore = setup_vectorstore()
            st.session_state.conversational_chain = chat_chain(st.session_state.vectorstore)
            st.session_state.selected_language = "English"  # Default language
        st.success(f"Welcome, {username}! Start by choosing an option.")
else:
    username = st.session_state.username
# Language options
languages = [
    "English", "Marathi", "Hindi", "Bengali", "Gujarati", "Kannada", "Malayalam",
    "Odia", "Punjabi", "Tamil", "Telugu", "Urdu", "Spanish", "French", "German"
]
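# These names are lowercased and handed to deep_translator's GoogleTranslator as the
# target language; GoogleTranslator accepts full language names, and every entry above
# is assumed to be supported by Google Translate.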
# Main interface
if "conversational_chain" not in st.session_state:
    st.session_state.vectorstore = setup_vectorstore()
    st.session_state.conversational_chain = chat_chain(st.session_state.vectorstore)
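# st.session_state persists across Streamlit reruns, so the vectorstore and chain are
# normally built once per session; this guard rebuilds them if they are missing
# (e.g. after the script is reloaded).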
if "username" in st.session_state: | |
st.subheader(f"Hello {username}, choose your option below:") | |
# Dropdown for selecting output language | |
st.session_state.selected_language = st.selectbox( | |
"Select output language:", | |
languages, | |
index=languages.index(st.session_state.get("selected_language", "English")) | |
) | |
# Option selection | |
option = st.radio( | |
"Choose an action:", | |
("Ask a general agriculture-related question", "Input sensor data for recommendations", "Satellite Data", "FAQ Section") | |
) | |
    # Option 1: Ask AI any agriculture-related question
    if option == "Ask a general agriculture-related question":
        user_query = st.chat_input("Ask AI anything about agriculture...")
        if user_query:
            with st.spinner("Processing your query..."):
                # Display user's query
                with st.chat_message("user"):
                    st.markdown(user_query)
                # Get assistant's response
                with st.chat_message("assistant"):
                    response = st.session_state.conversational_chain({"question": user_query})
                    assistant_response = response["answer"]
                    # Translate response based on selected language
                    translator = GoogleTranslator(source="en", target=st.session_state.selected_language.lower())
                    translated_response = translator.translate(assistant_response)
                    # Display response in selected language
                    st.markdown(f"**{st.session_state.selected_language}:** {translated_response}")
    # Option 2: Use live sensor data for recommendations
    elif option == "Input sensor data for recommendations":
        st.markdown("### Fetching data from sensors...")
        sensor_data = fetch_sensor_data()
        if sensor_data:
            ph = sensor_data["pH"]
            moisture = sensor_data["moisture"]
            temperature = sensor_data["temperature"]
            air_quality = sensor_data["air_quality"]
            st.markdown(f"**Sensor Data:**\n- pH: {ph}\n- Moisture: {moisture}%\n- Temperature: {temperature}°C\n- Air Quality: {air_quality}")
            if st.button("Get Recommendations"):
                with st.spinner("Analyzing data..."):
                    # Prepare input query
                    user_input = f"Recommendations for:\n- pH: {ph}\n- Moisture: {moisture}%\n- Temperature: {temperature}°C\n- Air Quality: {air_quality}"
                    # Display user's input
                    with st.chat_message("user"):
                        st.markdown(user_input)
                    # Get assistant's response
                    with st.chat_message("assistant"):
                        response = st.session_state.conversational_chain({"question": user_input})
                        assistant_response = response["answer"]
                        # Translate response based on selected language
                        translator = GoogleTranslator(source="en", target=st.session_state.selected_language.lower())
                        translated_response = translator.translate(assistant_response)
                        # Display response in selected language
                        st.markdown(f"**{st.session_state.selected_language}:** {translated_response}")
    # Option 3: Satellite Data
    elif option == "Satellite Data":
        st.markdown("### Satellite Data Functionality Coming Soon!")

    # Option 4: FAQ Section
    elif option == "FAQ Section":
        st.markdown("### FAQs Coming Soon!")
# OLD code (previous version kept for reference; sensor values were entered manually
# instead of being fetched from ThingSpeak)
# import os
# import json
# from datetime import datetime
# import streamlit as st
# from langchain_huggingface import HuggingFaceEmbeddings
# from langchain_chroma import Chroma
# from langchain_groq import ChatGroq
# from langchain.memory import ConversationBufferMemory
# from langchain.chains import ConversationalRetrievalChain
# from deep_translator import GoogleTranslator
# # Directory paths and configurations
# working_dir = os.path.dirname(os.path.abspath(__file__))
# config_data = json.load(open(f"{working_dir}/config.json"))
# GROQ_API_KEY = config_data["GROQ_API_KEY"]
# os.environ["GROQ_API_KEY"] = GROQ_API_KEY
# # Vectorstore setup
# def setup_vectorstore():
#     embeddings = HuggingFaceEmbeddings()
#     vectorstore = Chroma(persist_directory="soil_vectordb", embedding_function=embeddings)
#     return vectorstore
# # Chatbot chain setup
# def chat_chain(vectorstore):
#     llm = ChatGroq(model="llama-3.1-70b-versatile", temperature=0)
#     retriever = vectorstore.as_retriever()
#     memory = ConversationBufferMemory(
#         llm=llm,
#         output_key="answer",
#         memory_key="chat_history",
#         return_messages=True
#     )
#     chain = ConversationalRetrievalChain.from_llm(
#         llm=llm,
#         retriever=retriever,
#         chain_type="stuff",
#         memory=memory,
#         verbose=True,
#         return_source_documents=True
#     )
#     return chain
# # Updated Streamlit setup with language selection dropdown
# st.set_page_config(page_title="Soil.Ai", page_icon="🌱", layout="centered")
# st.title("🌱 Soil.Ai - Smart Farming Recommendations")
# st.subheader("AI-driven solutions for modern farming!")
# # Initialize session state
# if "username" not in st.session_state:
#     username = st.text_input("Enter your name to proceed:")
#     if username:
#         with st.spinner("Loading AI interface..."):
#             st.session_state.username = username
#             st.session_state.vectorstore = setup_vectorstore()
#             st.session_state.conversational_chain = chat_chain(st.session_state.vectorstore)
#             st.session_state.selected_language = "English"  # Default language
#         st.success(f"Welcome, {username}! Start by choosing an option.")
# else:
#     username = st.session_state.username
# # Language options
# languages = [
#     "English", "Marathi", "Hindi", "Bengali", "Gujarati", "Kannada", "Malayalam",
#     "Odia", "Punjabi", "Tamil", "Telugu", "Urdu", "Spanish", "French", "German"
# ]
# # Main interface
# if "conversational_chain" not in st.session_state:
#     st.session_state.vectorstore = setup_vectorstore()
#     st.session_state.conversational_chain = chat_chain(st.session_state.vectorstore)
# if "username" in st.session_state:
#     st.subheader(f"Hello {username}, choose your option below:")
#     # Dropdown for selecting output language
#     st.session_state.selected_language = st.selectbox(
#         "Select output language:",
#         languages,
#         index=languages.index(st.session_state.get("selected_language", "English"))
#     )
#     # Option selection
#     option = st.radio(
#         "Choose an action:",
#         ("Ask a general agriculture-related question", "Input sensor data for recommendations", "Satellite Data", "FAQ Section")
#     )
#     # Option 1: Ask AI any agriculture-related question
#     if option == "Ask a general agriculture-related question":
#         user_query = st.chat_input("Ask AI anything about agriculture...")
#         if user_query:
#             with st.spinner("Processing your query..."):
#                 # Display user's query
#                 with st.chat_message("user"):
#                     st.markdown(user_query)
#                 # Get assistant's response
#                 with st.chat_message("assistant"):
#                     response = st.session_state.conversational_chain({"question": user_query})
#                     assistant_response = response["answer"]
#                     # Translate response based on selected language
#                     translator = GoogleTranslator(source="en", target=st.session_state.selected_language.lower())
#                     translated_response = translator.translate(assistant_response)
#                     # Display response in selected language
#                     st.markdown(f"**{st.session_state.selected_language}:** {translated_response}")
#     # Option 2: Input sensor data for recommendations
#     elif option == "Input sensor data for recommendations":
#         st.markdown("### Enter soil and environmental parameters:")
#         ph = st.number_input("Enter Soil pH", min_value=0.0, max_value=14.0, step=0.1)
#         moisture = st.number_input("Enter Soil Moisture (%)", min_value=0.0, max_value=100.0, step=0.1)
#         temperature = st.number_input("Enter Temperature (°C)", min_value=-50.0, max_value=60.0, step=0.1)
#         air_quality = st.number_input("Enter Air Quality Index (AQI)", min_value=0, max_value=500, step=1)
#         if st.button("Get Recommendations"):
#             if ph and moisture and temperature and air_quality:
#                 with st.spinner("Analyzing data..."):
#                     # Prepare input query
#                     user_input = f"Recommendations for:\n- pH: {ph}\n- Moisture: {moisture}%\n- Temperature: {temperature}°C\n- Air Quality: {air_quality}"
#                     # Display user's input
#                     with st.chat_message("user"):
#                         st.markdown(user_input)
#                     # Get assistant's response
#                     with st.chat_message("assistant"):
#                         response = st.session_state.conversational_chain({"question": user_input})
#                         assistant_response = response["answer"]
#                         # Translate response based on selected language
#                         translator = GoogleTranslator(source="en", target=st.session_state.selected_language.lower())
#                         translated_response = translator.translate(assistant_response)
#                         # Display response in selected language
#                         st.markdown(f"**{st.session_state.selected_language}:** {translated_response}")
#             else:
#                 st.error("Please fill in all the fields!")
#     # Option 3: Satellite Data
#     elif option == "Satellite Data":
#         st.markdown("### Satellite Data Functionality Coming Soon!")
#     # Option 4: FAQ Section
#     elif option == "FAQ Section":
#         crop = st.radio("Select a crop for FAQs:", ("Cotton", "Tur"))
#         if crop == "Tur":
#             st.markdown("### *Q&A on Arhar Crop*")
#             tur_questions = [
#                 "Q1: What are the suitable climate and soil requirements for Arhar cultivation?",
#                 "Q2: What is the best time for sowing Arhar, and how much seed is needed per hectare?",
#                 "Q3: What are the improved varieties of Arhar and their characteristics?",
#                 "Q4: What fertilizers and irrigation are required for Arhar cultivation?",
#                 "Q5: What are the main pests and diseases affecting Arhar, and how can they be managed?"
#             ]
#             tur_answers = [
#                 "A: Arhar requires a warm and dry climate with a temperature range of 25-30°C. It thrives in well-drained loamy soil with a pH value of 6.0 to 7.5.",
#                 "A: The best time for sowing Arhar is from June to July (monsoon season). The seed requirement is 15-20 kg per hectare. The seeds should be treated with Trichoderma or Carbendazim before sowing.",
#                 "A: Some improved varieties of Arhar include ICPL-87 (early maturing), Sharad (high-yielding), and Pant Arhar-3 (short-duration).",
#                 "A: Fertilizers: Nitrogen: 20 kg/hectare, Phosphorus: 50 kg/hectare. Irrigation: Two to three irrigations during flowering and pod formation stages.",
#                 "A: Pests like pod borers and diseases like wilt (root rot) affect Arhar. Control measures include spraying neem oil and using disease-resistant varieties."
#             ]
#         elif crop == "Cotton":
#             st.markdown("### *Q&A on Cotton Crop*")
#             tur_questions = [
#                 "Q1: What is the suitable climate for cotton cultivation?",
#                 "Q2: How much water does cotton require during its growth?",
#                 "Q3: What are the common pests and diseases in cotton?",
#                 "Q4: Which fertilizers are best for cotton farming?",
#                 "Q5: What is the average yield of cotton per hectare?"
#             ]
#             tur_answers = [
#                 "A: Cotton grows well in warm climates with temperatures between 21-30°C.",
#                 "A: Cotton requires about 700-1300 mm of water depending on the variety and climate.",
#                 "A: Common pests include bollworms; diseases include leaf curl virus.",
#                 "A: Use nitrogen (60 kg/ha), phosphorus (30 kg/ha), and potassium (30 kg/ha).",
#                 "A: Average yield ranges between 500-800 kg/ha depending on the variety and conditions."
#             ]
#         for q, a in zip(tur_questions, tur_answers):
#             translator = GoogleTranslator(source="en", target=st.session_state.selected_language.lower())
#             st.markdown(f"**{translator.translate(q)}**\n\n{translator.translate(a)}")