import os
from langchain_core.messages import HumanMessage
from langchain_google_genai import ChatGoogleGenerativeAI
import streamlit as st

# Set your Google API key as an environment variable.
# Never commit a real key to source control; use a placeholder like the one below.
os.environ["GOOGLE_API_KEY"] = "YOUR_GOOGLE_API_KEY"
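# A hedged alternative: set GOOGLE_API_KEY in your shell before launching the app,
# or keep it in .streamlit/secrets.toml and load it like this (assumes a
# "GOOGLE_API_KEY" entry exists in that secrets file):
# os.environ["GOOGLE_API_KEY"] = st.secrets["GOOGLE_API_KEY"]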

def generate_response(contents, model_name="gemini-pro"):
    try:
        # Create the AI model with the specified name
        model = ChatGoogleGenerativeAI(model=model_name)
        
        # Create a HumanMessage with the content
        message = HumanMessage(content=contents)
        
        # Stream the model's response
        response = model.stream([message])
        
        # Collect and return the response
        response_text = ""
        for chunk in response:
            response_text += chunk.content
        return response_text
    
    except Exception as e:
        return f"Error: {str(e)}"

# Streamlit app
st.title("AI Chat with Google Generative AI")
st.write("This is a chat application using Google Generative AI with the Gemini model.")

# Initialize session state for chat history
if 'chat_history' not in st.session_state:
    st.session_state['chat_history'] = []

# Text input for user to enter a message
user_input = st.text_input("You:", "")

# Button to submit the message
if st.button("Send"):
    if user_input:
        # Generate AI response
        response = generate_response(user_input)
        
        # Add user message and AI response to chat history
        st.session_state['chat_history'].append(("You", user_input))
        st.session_state['chat_history'].append(("AI", response))
        
        # Note: reassigning the local variable does not clear the text_input widget;
        # Streamlit widgets keep their own value across reruns (see the commented
        # sketch below for one way to actually clear the box).
    else:
        st.write("Please enter a message.")

# Display the chat history
if st.session_state['chat_history']:
    for sender, message in st.session_state['chat_history']:
        st.write(f"{sender}: {message}")