import gradio as gr
import tensorflow as tf
from transformers import TFGPT2LMHeadModel, GPT2Tokenizer

# Previous approach using the high-level transformers pipeline API (commented out):
# generator = pipeline('text-generation', model='gpt2')
#
# def func(sentence, max_length, temp):
#     output_list = generator(sentence, max_length=max_length, num_return_sequences=5, temperature=float(temp))
#     output_strs = [d['generated_text'] for d in output_list]
#     return output_strs

# Load the GPT-2 tokenizer and TensorFlow model; reuse the EOS token as the pad token
# so generate() has a padding id to work with.
tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
model = TFGPT2LMHeadModel.from_pretrained("gpt2", pad_token_id=tokenizer.eos_token_id)

def func(sentence, max_length, temperature):
    # Encode the prompt and sample five continuations with nucleus sampling.
    input_ids = tokenizer.encode(sentence, return_tensors='tf')
    output_list = model.generate(
        input_ids,
        do_sample=True,
        max_length=int(max_length),       # slider values arrive as numbers; generate() expects an int
        temperature=float(temperature),
        top_p=0.92,                       # nucleus sampling: keep the smallest token set with 92% cumulative probability
        top_k=0,                          # disable top-k filtering so only top-p applies
        num_return_sequences=5
    )
    # Decode each sampled sequence back to text, one string per output box.
    output_strs = [tokenizer.decode(output, skip_special_tokens=True) for output in output_list]
    return output_strs

# Gradio UI: text prompt, max-length slider, and temperature slider in; five generated texts out.
demo = gr.Interface(fn=func,
                    inputs=["text",
                            gr.Slider(5, 25, value=10, step=1, label="Max length"),
                            gr.Slider(0.1, 10, value=0.1, label="Temperature")],
                    outputs=["text", "text", "text", "text", "text"]
                   )

if __name__ == "__main__":
    demo.launch()