Commit · e480997
Parent(s): d2a3bc4
testing
app.py CHANGED
@@ -1,7 +1,26 @@
 import gradio as gr
+from transformers import AutoTokenizer, AutoModelForSequenceClassification
+import torch
 
-
-
+model_name = "cross-encoder/multi-nli-xlm-r-100"
+tokenizer = AutoTokenizer.from_pretrained(model_name)
+model = AutoModelForSequenceClassification.from_pretrained(model_name)
 
-
-
+def generate_prediction(input_text):
+    input_ids = tokenizer.encode(input_text, truncation=True, padding=True, return_tensors='pt')
+    outputs = model(input_ids)
+    predicted_label = torch.argmax(outputs.logits)
+    label_map = {0: "entailment", 1: "neutral", 2: "contradiction"}
+    predicted_label_text = label_map[predicted_label.item()]
+    return predicted_label_text
+
+input_text = gr.inputs.Textbox(label="Input text")
+output_text = gr.outputs.Textbox(label="Output text")
+
+gr.Interface(
+    generate_prediction,
+    inputs=input_text,
+    outputs=output_text,
+    title="Text Classifier",
+    description="A Hugging Face cross-encoder model for text classification.",
+).launch()
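The committed app.py targets the legacy `gr.inputs` / `gr.outputs` component namespaces, which were deprecated in Gradio 3.x and removed in later releases. Below is a minimal sketch of the same interface against the current top-level components; it keeps the committed title, description, and labels, and assumes `generate_prediction` (along with the model and tokenizer it uses) is defined exactly as in the diff above.

```python
import gradio as gr

# Minimal sketch only: same UI as the committed app.py, but using the
# top-level gr.Textbox components that replace gr.inputs / gr.outputs
# in current Gradio releases. Assumes generate_prediction is defined
# as in the diff above.
demo = gr.Interface(
    fn=generate_prediction,
    inputs=gr.Textbox(label="Input text"),
    outputs=gr.Textbox(label="Output text"),
    title="Text Classifier",
    description="A Hugging Face cross-encoder model for text classification.",
)

if __name__ == "__main__":
    demo.launch()
```

NLI cross-encoders are usually scored on a premise/hypothesis pair and run under `torch.no_grad()`, so the prediction function itself may also need adjusting depending on how the model expects its input; that is not shown here.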