Spaces: DialoGPT Tagalog, app file at commit e99dd13
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

# Load the Tagalog DialoGPT checkpoint and its tokenizer from the Hugging Face Hub.
tokenizer = AutoTokenizer.from_pretrained("gabtan99/dialogpt-tagalog-medium")
model = AutoModelForCausalLM.from_pretrained("gabtan99/dialogpt-tagalog-medium")


def chat(message, history):
    # Encode the new user turn, terminated with the EOS token as DialoGPT expects.
    new_tokens = tokenizer.encode(message + tokenizer.eos_token, return_tensors='pt')
    # Prepend the token history carried in the Gradio state so the model sees the full conversation.
    bot_input = new_tokens if history is None else torch.cat([history, new_tokens], dim=-1)
    history = model.generate(bot_input, max_length=1000, pad_token_id=tokenizer.eos_token_id)
    # Decode only the newly generated tokens (everything after the prompt).
    response = tokenizer.decode(history[:, bot_input.shape[-1]:][0], skip_special_tokens=True)
    return response, history


user_input = gr.inputs.Textbox(lines=2, label='User:')
bot_output = gr.outputs.Textbox(label='Bot:')

gr.Interface(fn=chat,
             title="DialoGPT Tagalog",
             inputs=[user_input, "state"],
             outputs=[bot_output, "state"],
             allow_screenshot=False,
             allow_flagging='never',
             article="Model from: gabtan99").launch()
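# For reference, a minimal sketch of how the "state" round-trip above behaves when
# chat() is called directly (hypothetical example turns, no Gradio UI; launch() above
# blocks, so this is illustration only rather than part of the app):
#
#   state = None                                   # Gradio passes None on the first turn
#   reply, state = chat("Kumusta ka?", state)      # state now holds the full token history
#   reply, state = chat("Anong balita?", state)    # second turn is generated with context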