# app.py — Streamlit chatbot comparing DialoGPT-small vs DialoGPT-medium responses.
import streamlit as st
from transformers import AutoModelForCausalLM, AutoTokenizer
# Load pre-trained models.
model_small_name = "microsoft/DialoGPT-small"
model_large_name = "microsoft/DialoGPT-medium"


@st.cache_resource
def _load_model(name):
    """Load and cache a (tokenizer, model) pair for *name*.

    Streamlit re-executes this whole script on every widget interaction;
    without caching, both models would be re-instantiated from disk on
    every keystroke. ``st.cache_resource`` keeps one copy per model name
    for the lifetime of the server process.
    """
    tokenizer = AutoTokenizer.from_pretrained(name)
    model = AutoModelForCausalLM.from_pretrained(name)
    return tokenizer, model


tokenizer_small, model_small = _load_model(model_small_name)
tokenizer_large, model_large = _load_model(model_large_name)
# Function to generate responses.
def generate_response(input_text, model, tokenizer):
    """Generate a single-turn chat reply for *input_text*.

    Args:
        input_text: The user's message (plain string).
        model: A causal-LM (DialoGPT-style) transformers model.
        tokenizer: The tokenizer matching *model*.

    Returns:
        The model's reply as a string, without the prompt echoed back
        and with special tokens stripped.
    """
    # DialoGPT expects the turn to be terminated by the EOS token.
    inputs = tokenizer.encode(input_text + tokenizer.eos_token, return_tensors='pt')
    outputs = model.generate(
        inputs,
        max_length=1000,
        pad_token_id=tokenizer.eos_token_id,
        no_repeat_ngram_size=3,
    )
    # generate() returns prompt + continuation; slice off the prompt tokens
    # so the user's own input is not repeated at the start of the reply.
    reply_ids = outputs[0][inputs.shape[-1]:]
    return tokenizer.decode(reply_ids, skip_special_tokens=True)
# Streamlit UI.
st.title("Mental Health Chatbot")

# Single-line chat prompt from the user.
user_input = st.text_input("You:", "")

if user_input:
    # Run the same prompt through both model sizes, then render each
    # answer under its own heading.
    replies = [
        ("DialoGPT-small Response:",
         generate_response(user_input, model_small, tokenizer_small)),
        ("DialoGPT-medium Response:",
         generate_response(user_input, model_large, tokenizer_large)),
    ]
    for heading, reply in replies:
        st.subheader(heading)
        st.write(reply)