import os

import kaggle
import google.generativeai as gemini
import gradio as gr

# Ensure that the Kaggle API is authenticated correctly
try:
    kaggle.api.authenticate()
except Exception as e:
    print(f"Kaggle API authentication failed: {e}")

# Ensure that the Google Gemini API key is set properly
gemini_api_key = os.getenv("GEMINI_API_KEY")
if not gemini_api_key:
    print("Error: GEMINI_API_KEY environment variable is not set.")
else:
    gemini.configure(api_key=gemini_api_key)

# Model name is an assumption; use any Gemini model your key has access to
model = gemini.GenerativeModel("gemini-1.5-flash")


# Function to handle the chat interaction
def gradio_chat(usertxt, chat_history):
    try:
        # Convert the stored role/content history into the role/parts format
        # expected by the Gemini API ("assistant" maps to "model")
        gemini_history = [
            {"role": "model" if msg["role"] == "assistant" else "user", "parts": [msg["content"]]}
            for msg in chat_history
        ]

        # Initialize a chat session with the previous history
        chat = model.start_chat(history=gemini_history)

        # Send the user message to the Gemini model
        response = chat.send_message(usertxt)

        # Append the user message and the assistant's reply to the chat history
        chat_history.append({"role": "user", "content": usertxt})
        chat_history.append({"role": "assistant", "content": response.text})
        return chat_history, chat_history
    except Exception as e:
        error_message = f"Error occurred: {e}"
        chat_history.append({"role": "assistant", "content": error_message})
        return chat_history, chat_history


# HTML content for the Gradio interface (customize as needed)
html_content = """

<h1>Food Safety Inspection Hub Prototype</h1>

<p>Chat with our AI-powered assistant to report food safety concerns and interact with authorities.</p>

""" # Define the Gradio interface with gr.Blocks() as demo: gr.HTML(html_content) chatbot = gr.Chatbot() user_input = gr.Textbox(placeholder="Enter your message here...") chat_history = gr.State([]) submit_btn = gr.Button("Submit") # When submit button is clicked, trigger the chat function submit_btn.click(gradio_chat, inputs=[user_input, chat_history], outputs=[chatbot, chat_history]) # Launch the Gradio interface demo.launch()