import gradio as gr
import pandas as pd
from io import StringIO
from pandasai import PandasAI
from pandasai.llm.huggingface import HuggingFaceLLM

# Initialize an open-source LLM.
# Here we use "google/flan-t5-small", a small free model from Hugging Face.
# NOTE(review): the `pandasai.llm.huggingface.HuggingFaceLLM` import path exists
# only in some PandasAI releases — confirm against the pinned pandasai version.
llm = HuggingFaceLLM(model_name="google/flan-t5-small")
# Module-level PandasAI wrapper reused across all Gradio requests.
pandas_ai = PandasAI(llm)

def process_file_and_query(file_obj, question):
    """Answer a natural-language question about an uploaded CSV file.

    Parameters
    ----------
    file_obj : str | file-like | None
        The value produced by ``gr.File``. Depending on the Gradio version this
        is a filesystem path (Gradio 4.x, default ``type="filepath"``) or a
        temp-file wrapper exposing ``.name``/``.read()`` (Gradio 3.x).
    question : str
        The user's question about the data.

    Returns
    -------
    str
        The model's answer, or a human-readable error message.
    """
    if file_obj is None:
        return "Please upload a CSV file."
    if not question or not question.strip():
        return "Please enter a question about your data."

    try:
        # Gradio may hand us a plain path (4.x) or a file-like object (3.x).
        if isinstance(file_obj, (str, bytes)):
            df = pd.read_csv(file_obj)
        elif getattr(file_obj, "name", None):
            # 3.x temp-file wrappers are often already closed; re-open via path.
            df = pd.read_csv(file_obj.name)
        else:
            # Genuine file-like object: read it, decoding bytes if necessary.
            raw = file_obj.read()
            if isinstance(raw, bytes):
                raw = raw.decode("utf-8")
            df = pd.read_csv(StringIO(raw))
    except Exception as e:
        return f"Error reading CSV file: {e}"

    try:
        # Delegate the actual question answering to PandasAI.
        answer = pandas_ai.run(df, prompt=question)
        return answer
    except Exception as e:
        return f"Error processing the query: {e}"

# Build the Gradio UI: a file upload plus a free-text question box,
# with a plain-text answer as the only output.
csv_upload = gr.File(label="Upload CSV file")
question_box = gr.Textbox(
    label="Ask a question about your data",
    placeholder="E.g., What is the average of column X?",
)

iface = gr.Interface(
    fn=process_file_and_query,
    inputs=[csv_upload, question_box],
    outputs="text",
    title="Chat with Your CSV",
    description=(
        "Upload your CSV file and ask questions about the data. "
        "This app uses an open-source LLM (google/flan-t5-small) via PandasAI to answer your questions interactively."
    ),
)

# Launch the Gradio server only when executed as a script (not on import).
if __name__ == "__main__":
    iface.launch()