import gradio as gr
import torch

# model.py is expected to provide GPTLanguageModel and decode (plus, via the
# star import, any other names the model definition relies on).
from model import *

DEVICE = "cuda" if torch.cuda.is_available() else "cpu"

model = GPTLanguageModel().to(DEVICE)

model.load_state_dict(torch.load("mini-gpt.pth", map_location=DEVICE), strict=False)
model.eval()

# Seed generation with a single zero token (assumed to be a valid index in the
# model's vocabulary) and decode one long sample up front.
context = torch.zeros((1, 1), dtype=torch.long, device=DEVICE)
answer = decode(model.generate(context, max_new_tokens=3000)[0].tolist())
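# The sample is generated once at start-up; the callback below only slices a
# prefix of this cached string, so responses are instant but every request
# reuses the same text.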

def display(text, number):
    # Show the user's text followed by the first `number + 1` characters of the
    # pre-generated sample.
    combined_text = text + answer[:number + 1]
    return combined_text
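
# A minimal sketch of an alternative (not wired into the interface below, and
# assuming the same generate/decode API as above): sample fresh text on every
# request instead of slicing the cached string, at the cost of a slower response.
def display_fresh(text, number):
    with torch.no_grad():
        sample = model.generate(context, max_new_tokens=int(number))[0].tolist()
    return text + decode(sample)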

input_box = gr.Textbox()
# `value` (not `default`) sets the initial slider position in current Gradio releases.
input_slider = gr.Slider(minimum=500, maximum=2000, value=500, step=100, label="Select the maximum number of tokens/words:")
output_text = gr.Textbox()

gr.Interface(fn=display, inputs=[input_box, input_slider], outputs=output_text).launch()
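
# Usage sketch (assuming model.py and the mini-gpt.pth checkpoint sit next to this
# script): run the file with Python; launch() starts a local server and prints its URL.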