import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

# Load the model and tokenizer
def load_models():
    # DialoGPT-medium is a small conversational checkpoint; swap in another causal LM to customize
    model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-medium")
    tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
    return model, tokenizer

# Generate a response and update both the token-id history and the visible chat log
def chat_with_model(user_input, history_ids, chat_log):
    # Tokenize the new user message, terminated with the EOS token
    new_user_input_ids = tokenizer.encode(user_input + tokenizer.eos_token, return_tensors='pt')

    # Append the new user input to the running token-id history
    bot_input_ids = torch.cat([history_ids, new_user_input_ids], dim=-1) if history_ids is not None else new_user_input_ids

    # Generate a response from the model
    history_ids = model.generate(bot_input_ids, max_length=1000, pad_token_id=tokenizer.eos_token_id)

    # Decode only the newly generated tokens (everything after the prompt)
    bot_output = tokenizer.decode(history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True)

    # Append the exchange to the visible chat log and clear the textbox
    chat_log = chat_log + [(user_input, bot_output)]
    return history_ids, chat_log, ""

# Initialize the model and tokenizer once; chat_with_model uses these module-level objects
model, tokenizer = load_models()

# Build Gradio interface
def build_gradio_interface():
    with gr.Blocks() as demo:
        gr.Markdown("# Chagrin AI Chatbot")

        # Chat window showing the conversation so far
        chatbot = gr.Chatbot()

        # Hidden state holding the DialoGPT token-id history between turns
        history_ids = gr.State(None)

        # Text input box for the user's message
        user_input = gr.Textbox(label="Type your message", placeholder="Ask something...", interactive=True)

        # Button for sending the input
        submit_btn = gr.Button("Send Message")

        # Wire the button to the chat function; the model and tokenizer are module-level,
        # so only Gradio components (text, state, chat log) are passed as inputs/outputs
        submit_btn.click(chat_with_model, inputs=[user_input, history_ids, chatbot], outputs=[history_ids, chatbot, user_input])

    demo.launch()

# Run the Gradio interface
if __name__ == "__main__":
    build_gradio_interface()
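
# Quick sanity check without launching the UI (a sketch; it assumes the model weights
# download successfully and that chat_with_model keeps the signature above; "Hello!"
# is just an example prompt):
#
#   >>> history_ids, chat_log, _ = chat_with_model("Hello!", None, [])
#   >>> print(chat_log[-1][1])  # DialoGPT's reply to "Hello!"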