Jeff2323 committed on
Commit
a9ccb72
·
1 Parent(s): ff92373

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +41 -24
app.py CHANGED
@@ -1,25 +1,42 @@
1
  import gradio as gr
2
- from transformers import GPT2LMHeadModel, GPT2Tokenizer
3
-
4
# Load the pre-trained GPT-2 model and its tokenizer.
model_name = "gpt2"  # Can be swapped for one of the larger GPT-2 checkpoints.
tokenizer = GPT2Tokenizer.from_pretrained(model_name)
model = GPT2LMHeadModel.from_pretrained(model_name)
9
# Generate a model reply for the user's input text.
def chatbot(input_text):
    """Encode the prompt, sample a continuation (up to 50 tokens total), and decode it."""
    encoded_prompt = tokenizer.encode(input_text, return_tensors="pt")
    generated = model.generate(encoded_prompt, max_length=50, num_return_sequences=1)
    return tokenizer.decode(generated[0], skip_special_tokens=True)
15
-
16
# Gradio UI wiring: a single text-in/text-out interface around chatbot().
iface = gr.Interface(
    fn=chatbot,
    inputs="text",
    outputs="text",
    title="Chatbot GPT-2",
    description="Converse com o Chatbot GPT-2 usando texto.",
)

# Start the local Gradio server (blocking call).
iface.launch()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import gradio as gr
2
from gradio import mix  # NOTE(review): `mix` is never referenced below — likely leftover; confirm before removing.

# UI copy shown on the demo page.
title = "GPT2"
description = "Gradio Demo for OpenAI GPT2. To use it, simply add your text, or click one of the examples to load them. Read more at the links below."

# Link to the GPT-2 paper, rendered beneath the interface.
article = "<p style='text-align: center'><a href='https://d4mucfpksywv.cloudfront.net/better-language-models/language_models_are_unsupervised_multitask_learners.pdf' target='_blank'>Language Models are Unsupervised Multitask Learners</a></p>"

# Each example row supplies (input text, model choice) matching the two inputs below.
examples = [
    ['Paris is the capital of',"gpt2-medium"]
]
12
+
13
# Remote inference callables loaded from the Hugging Face Hub, one per model size.
# NOTE(review): `gr.Interface.load` is the legacy entry point (replaced by
# `gr.load` in newer Gradio releases) — confirm the pinned gradio version.
io1 = gr.Interface.load("huggingface/distilgpt2")

io2 = gr.Interface.load("huggingface/gpt2-large")

io3 = gr.Interface.load("huggingface/gpt2-medium")

io4 = gr.Interface.load("huggingface/gpt2-xl")
20
+
21
def inference(text, model):
    """Route *text* to the Hub interface named by *model*.

    Unrecognized model names fall back to distilgpt2 (io1), matching the
    original if/elif chain's else branch.
    """
    routes = {
        "gpt2-large": io2,
        "gpt2-medium": io3,
        "gpt2-xl": io4,
    }
    handler = routes.get(model, io1)
    return handler(text)
31
+
32
+
33
+
34
# Build and launch the demo: text prompt + model dropdown in, generated text out.
# NOTE(review): `gr.inputs`/`gr.outputs` and `enable_queue=` belong to the
# legacy Gradio 2.x API (removed in 3.x+) — confirm the pinned gradio version.
gr.Interface(
    inference,
    [gr.inputs.Textbox(label="Input"),gr.inputs.Dropdown(choices=["distilgpt2","gpt2-medium","gpt2-large","gpt2-xl"], type="value", default="gpt2-medium", label="model")
    ],
    gr.outputs.Textbox(label="Output"),
    examples=examples,
    article=article,
    title=title,
    description=description).launch(enable_queue=True)