# app.py — GPT-2 text-generation demo (author: rjiang12, revision 35fbfac)
import gradio as gr
import tensorflow as tf
from transformers import TFGPT2LMHeadModel, GPT2Tokenizer
# NOTE: An earlier version used transformers.pipeline('text-generation');
# it was replaced by the explicit tokenizer/model calls below for finer
# control over sampling (top_p / top_k).
# Load the GPT-2 tokenizer and TensorFlow model once at startup.
# Downloads weights from the HuggingFace Hub on first run (network side effect).
tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
# GPT-2 has no dedicated pad token, so reuse EOS for padding during generation.
model = TFGPT2LMHeadModel.from_pretrained("gpt2", pad_token_id=tokenizer.eos_token_id)
def func(sentence, max_length, temperature):
    """Generate five sampled GPT-2 continuations of *sentence*.

    Args:
        sentence: Prompt text to continue.
        max_length: Total token length of each generated sequence.
            Gradio sliders deliver floats, so this is cast to ``int``
            (``generate`` rejects non-integer lengths).
        temperature: Softmax temperature for sampling; cast to ``float``
            for the same reason.

    Returns:
        A list of five decoded strings, one per Gradio output textbox.
    """
    input_ids = tokenizer.encode(sentence, return_tensors='tf')
    output_list = model.generate(
        input_ids,
        do_sample=True,
        max_length=int(max_length),      # generate() requires an integer length
        temperature=float(temperature),
        top_p=0.92,                      # nucleus sampling
        top_k=0,                         # disable top-k so top-p alone filters
        num_return_sequences=5,
    )
    return [tokenizer.decode(output, skip_special_tokens=True)
            for output in output_list]
# Wire the generator into a simple web UI: one prompt box plus two sliders
# (length and temperature), fanned out to five textboxes — one per sequence.
demo = gr.Interface(
    fn=func,
    inputs=[
        "text",
        gr.Slider(5, 25, value=10, step=1),
        gr.Slider(0.1, 10, value=0.1),
    ],
    outputs=["text"] * 5,
)

if __name__ == "__main__":
    demo.launch()