import gradio as gr
import torch
import numpy as np
from transformers import AutoTokenizer, AutoModelForSequenceClassification
# Load model & tokenizer from the Space
model = AutoModelForSequenceClassification.from_pretrained(".")
tokenizer = AutoTokenizer.from_pretrained(".")
category_mapping = {
    0: "Q/E", 1: "DA", 2: "V", 3: "DM", 4: "P",
    5: "DS", 6: "EAT", 7: "AM", 8: "Other", 9: "TSC",
}
def predict(text):
    # Tokenize the input and run a single forward pass without tracking gradients
    inputs = tokenizer(text, return_tensors="pt", truncation=True, padding="max_length", max_length=512)
    with torch.no_grad():
        # The model returns a SequenceClassifierOutput; take its .logits tensor
        logits = model(**inputs).logits
    probs = torch.softmax(logits, dim=1).numpy()
    pred_class = int(np.argmax(probs))
    category_name = category_mapping.get(pred_class, "Unknown")
    return f"Predicted Category: {category_name} (Code: {pred_class})"
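# Example call (illustrative input only, not from the original app):
#   predict("Sample text to classify")
#   -> "Predicted Category: <label> (Code: <index>)"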
iface = gr.Interface(fn=predict, inputs="text", outputs="text")
iface.launch()
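
# Once launched, the endpoint can also be queried programmatically. A minimal sketch,
# assuming the default local address and Gradio's standard "/predict" endpoint name:
#   from gradio_client import Client
#   client = Client("http://127.0.0.1:7860/")
#   print(client.predict("some input text", api_name="/predict"))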