import torch

# Force CPU execution by exposing a module-level device handle.
# NOTE(review): the original called torch.cuda.set_device(torch.device('cpu')),
# which raises a ValueError because set_device() accepts only CUDA devices,
# and its else branch built a device object without binding it (a no-op).
# The apparent intent — run on CPU — is preserved here.
device = torch.device('cpu')

import subprocess
import sys
import gradio as gr
from model import llm_chain_response, get_response_value
from process_documents import create_db_from_files

import subprocess

# Install unsloth at runtime.
# NOTE(review): runtime pip installs are fragile — prefer pinning this in
# requirements.txt. Using sys.executable guarantees the package is installed
# into the same interpreter running this script (a bare "pip" may belong to
# a different Python), and check=True surfaces a failed install immediately
# instead of silently continuing.
subprocess.run(
    [
        sys.executable,
        "-m",
        "pip",
        "install",
        "unsloth[colab-new] @ git+https://github.com/unslothai/unsloth.git",
    ],
    check=True,
)
 
# Build the LLM chain once at import time so every request reuses it.
# NOTE(review): presumably a LangChain-style chain exposing .invoke() —
# confirm against model.llm_chain_response.
llm_chain = llm_chain_response()

def chat_with_mistral(user_input):
    """Send *user_input* to the LLM chain and return the extracted answer.

    Returns an error string when the input is empty/falsy; otherwise invokes
    the module-level ``llm_chain`` and returns the processed result text.
    """
    if not user_input:
        # Fixed grammar of the user-facing message ("is not be empty").
        return "The message must not be empty."
    response = llm_chain.invoke({"query": user_input})
    print(response)

    # Extract once instead of calling get_response_value twice.
    answer = get_response_value(response["result"])
    print("---------------Response--------------")
    print(answer)
    return answer

def main():
    """Entry point: populate the document DB, then serve the Gradio chat UI."""
    # The vector database must exist before the first query arrives.
    create_db_from_files()

    demo = gr.Interface(
        fn=chat_with_mistral,
        inputs=gr.components.Textbox(label="Enter Your Message"),
        outputs=gr.components.Markdown(label="ChatbotResponse"),
        title="Resvu AI Chatbot",
        description="Interact with the Resvu API via this chatbot. Enter a message and get a response.",
        examples=[
            "Hi, how are you",
            "Who are you?",
            "What services do you offer?",
            "How can I find out about upcoming community events?",
        ],
        allow_flagging="never",
    )
    demo.launch()

# Launch the app only when executed as a script, not when imported.
if __name__ == "__main__":
    main()