nnilayy commited on
Commit
ada097e
·
verified ·
1 Parent(s): b11646a

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -0
app.py ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Third-party dependencies: gradio provides the web UI, transformers the LLM.
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the tokenizer and model once at startup so every request reuses them.
# NOTE(review): "meta-llama/Meta-Llama-3-8B" is a gated repository — loading
# requires an accepted license / HF auth token, and the 8B weights need
# substantial RAM; confirm the deployment environment has both.
tokenizer = AutoTokenizer.from_pretrained("meta-llama/Meta-Llama-3-8B")
model = AutoModelForCausalLM.from_pretrained("meta-llama/Meta-Llama-3-8B")
def generate_text(prompt):
    """Generate a text continuation of *prompt* with the loaded Llama 3 model.

    Args:
        prompt: User-supplied input string from the Gradio textbox.

    Returns:
        The decoded generation as a string. Note the decoded output includes
        the prompt itself (special tokens are stripped).
    """
    # Tokenize via __call__ to also get an attention mask, so generate()
    # does not have to infer which positions are real tokens.
    encoded = tokenizer(prompt, return_tensors="pt")
    # max_new_tokens (rather than the original max_length=50) guarantees new
    # text is produced even when the prompt alone is 50+ tokens long —
    # max_length counts the prompt tokens toward the limit.
    outputs = model.generate(
        encoded["input_ids"],
        attention_mask=encoded["attention_mask"],
        max_new_tokens=50,
        num_return_sequences=1,
    )
    generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return generated_text
# Expose generate_text through a minimal text-in / text-out Gradio demo
# and start the local web server (blocks until interrupted).
iface = gr.Interface(
    fn=generate_text,
    inputs="text",
    outputs="text",
)
iface.launch()