Update app3.py
app3.py
CHANGED
@@ -2,7 +2,7 @@ import streamlit as st
 import pandas as pd
 import plotly.express as px
 from datasets import load_dataset
-from pandasai import
+from pandasai import SmartDataframe
 from pandasai.llm.openai import OpenAI
 from langchain_community.embeddings.openai import OpenAIEmbeddings
 from langchain_community.vectorstores import FAISS
@@ -37,7 +37,7 @@ logger.debug(f"OPENAI_API_KEY: {api_key}")
 logger.debug(f"PANDASAI_API_KEY: {pandasai_api_key}")

 # Title of the app
-st.title("Data Analyzer")
+st.title("PandasAI and RAG Data Analyzer")

 # Function to load datasets into session
 def load_dataset_into_session():
@@ -99,9 +99,11 @@ load_dataset_into_session()
 if st.session_state.df is not None:
     df = st.session_state.df
     try:
-        # Initialize
-        llm = OpenAI(
-
+        # Initialize OpenAI LLM
+        llm = OpenAI(api_token=pandasai_api_key)  # PandasAI LLM
+
+        # Create SmartDataframe for PandasAI
+        smart_df = SmartDataframe(df, config={"llm": llm})

         # Convert DataFrame to documents for RAG
         documents = [
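For reference, a self-contained sketch of the pattern this hunk introduces: wrap the DataFrame in a `SmartDataframe` configured with pandasai's OpenAI LLM, then query it with `chat`. The sample frame and the API token are placeholders, not values from app3.py:

```python
# Standalone sketch of the SmartDataframe setup added in this hunk.
# Assumes pandasai with OpenAI support is installed; the token and the
# sample frame are placeholders.
import pandas as pd
from pandasai import SmartDataframe
from pandasai.llm.openai import OpenAI

df = pd.DataFrame({"city": ["Oslo", "Lima"], "temp_c": [4, 19]})

llm = OpenAI(api_token="sk-...")                    # placeholder key
smart_df = SmartDataframe(df, config={"llm": llm})

# chat() sends the question plus the frame's schema to the LLM and returns
# the computed answer (a string, number, or DataFrame depending on the ask).
print(smart_df.chat("Which city is warmer?"))
```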
@@ -114,7 +116,7 @@ if st.session_state.df is not None:
             for index, row in df.iterrows()
         ]

-        # Set up RAG
+        # Set up RAG
         embeddings = OpenAIEmbeddings()
         vectorstore = FAISS.from_documents(documents, embeddings)
         retriever = vectorstore.as_retriever()
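The hunk above indexes one document per DataFrame row in FAISS. A hedged sketch of that wiring end to end; the `Document` construction and the retrieval query are illustrative assumptions, since the `documents = [...]` body and the downstream QA chain fall outside this hunk:

```python
# Illustrative sketch of the row-to-document RAG indexing used above.
# Assumes langchain-community, faiss-cpu, and an OPENAI_API_KEY in the
# environment; the page_content layout is a guess, not code from app3.py.
import pandas as pd
from langchain_core.documents import Document
from langchain_community.embeddings.openai import OpenAIEmbeddings
from langchain_community.vectorstores import FAISS

df = pd.DataFrame({"product": ["mug", "lamp"], "price": [9.5, 24.0]})

# One document per row, serialized as "column: value" pairs.
documents = [
    Document(
        page_content=", ".join(f"{col}: {row[col]}" for col in df.columns),
        metadata={"row": int(index)},
    )
    for index, row in df.iterrows()
]

embeddings = OpenAIEmbeddings()
vectorstore = FAISS.from_documents(documents, embeddings)
retriever = vectorstore.as_retriever(search_kwargs={"k": 2})

# Retrieve the rows most similar to a natural-language question.
print(retriever.invoke("Which product costs less than 10?"))
```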
@@ -135,7 +137,7 @@ if st.session_state.df is not None:
         pandas_question = st.text_input("Ask a question about the data (PandasAI):")
         if pandas_question:
             try:
-                result =
+                result = smart_df.chat(pandas_question)
                 if result:
                     st.write("PandasAI Answer:", result)
                 else:
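`smart_df.chat` can hand back different Python types depending on the question (string, number, or DataFrame), so the `st.write` above may receive any of them. A hypothetical type-aware display for the Streamlit side; none of this branching is taken from app3.py:

```python
# Hypothetical handling of the result types SmartDataframe.chat() may return.
# `result` stands in for the value of smart_df.chat(pandas_question).
import pandas as pd
import streamlit as st

result = pd.DataFrame({"city": ["Oslo"], "temp_c": [4]})  # placeholder answer

if isinstance(result, pd.DataFrame):
    st.dataframe(result)                  # tabular answers render as a table
elif result is not None:
    st.write("PandasAI Answer:", result)  # strings and numbers print directly
else:
    st.warning("PandasAI returned no result for this question.")
```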
@@ -164,7 +166,7 @@ if st.session_state.df is not None:
         )
         if viz_question:
             try:
-                result =
+                result = smart_df.chat(viz_question)
                 import re
                 code_pattern = r"```python\n(.*?)\n```"
                 code_match = re.search(code_pattern, result, re.DOTALL)
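The visualization path asks the LLM for plotting code and pulls the first fenced Python block out of the reply with the regex above. A standalone sketch of that extraction step; the reply text is a placeholder, and printing the snippet (rather than executing it) is an assumption about the app's next step:

```python
# Sketch of extracting a fenced Python block from an LLM reply, mirroring the
# code_pattern used above. The reply text below is a placeholder.
import re

fence = "```"
result = (
    "Here is a chart:\n"
    f"{fence}python\n"
    "fig = px.bar(df, x='city', y='temp_c')\n"
    f"{fence}\n"
    "Anything else?"
)

code_pattern = r"```python\n(.*?)\n```"
code_match = re.search(code_pattern, result, re.DOTALL)

if code_match:
    extracted = code_match.group(1)  # the code between the fences
    print(extracted)
else:
    print("No fenced python block found in the response.")
```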
@@ -181,4 +183,4 @@ if st.session_state.df is not None:
             except Exception as e:
                 st.error(f"An error occurred during processing: {e}")
 else:
-    st.info("Please load a dataset to start analysis.")
+    st.info("Please load a dataset to start analysis.")