# Source: Hugging Face Space "xgen" — app.py (commit 2720910, 808 bytes),
# uploaded by user mrchuy.  (Page-viewer chrome from the scrape commented
# out so the file parses as Python.)
import gradio as gr
from transformers import pipeline
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM
# Load the XGen-7B (8k context) base model and its tokenizer once at import
# time so the Gradio handler can reuse them across requests.
# trust_remote_code=True is required because the XGen repo ships a custom
# tokenizer implementation; bfloat16 halves the weight memory footprint.
tokenizer = AutoTokenizer.from_pretrained(
    "Salesforce/xgen-7b-8k-base", trust_remote_code=True
)
model = AutoModelForCausalLM.from_pretrained(
    "Salesforce/xgen-7b-8k-base", torch_dtype=torch.bfloat16
)
def gentext(user_input="The world is"):
    """Generate a continuation of *user_input* with the XGen model.

    Args:
        user_input: Prompt text to continue.

    Returns:
        dict with a single "output" key holding the decoded text
        (prompt + continuation), special tokens stripped.
    """
    inputs = tokenizer(user_input, return_tensors="pt")
    # max_new_tokens bounds only the generated continuation.  The original
    # max_length=128 counted the prompt tokens as well, so a long prompt
    # could leave little or no room for generation.
    sample = model.generate(**inputs, max_new_tokens=128)
    # skip_special_tokens keeps markers like <|endoftext|> out of the UI.
    return {"output": tokenizer.decode(sample[0], skip_special_tokens=True)}
# Build and launch the Gradio UI.  The original code used the removed
# gr.inputs/gr.outputs namespaces — and gr.inputs.Text never existed even
# in legacy Gradio (the class was Textbox) — while gr.outputs.Label is a
# classification-confidence component, wrong for generated text.  gentext
# returns a dict, so a JSON component renders it faithfully.
gr.Interface(
    fn=gentext,
    inputs=gr.Textbox(label="Some prompt", value="The world is"),
    outputs=gr.JSON(label="Generated text"),
    title="Some prompt",
).launch()