Uploading app.py file
app.py
ADDED
@@ -0,0 +1,55 @@
+import gradio as gr
+import torch
+from transformers import AutoModelForSequenceClassification, AutoTokenizer
+
+# Defining the model path relative to the repository root.
+model_path = "abdullah123456/NLP_Project"
+
+# Loading the model and tokenizer from the local directory.
+# The parameter `local_files_only=True` ensures the files are loaded from the repository rather than re-downloaded from the Hub.
+model = AutoModelForSequenceClassification.from_pretrained(model_path, local_files_only=True)
+tokenizer = AutoTokenizer.from_pretrained(model_path, local_files_only=True)
+
+# Defining a simple text-cleaning function that collapses repeated whitespace.
+def clean_text(text):
+    return " ".join(text.split())
+
+# Defining the prediction function that the web interface will use.
+def predict_sentiment(tweet: str) -> str:
+
+    # Cleaning the tweet.
+    tweet_clean = clean_text(tweet)
+
+    # Tokenizing the tweet.
+    inputs = tokenizer(tweet_clean, return_tensors="pt", truncation=True, padding="max_length", max_length=128)
+
+    # Moving the input tensors to the same device as the model.
+    inputs = {k: v.to(model.device) for k, v in inputs.items()}
+
+    with torch.no_grad():
+        outputs = model(**inputs)
+    # Getting the predicted class index.
+    predicted_class = torch.argmax(outputs.logits, dim=1).item()
+
+    # Mapping the class index to a human-readable label.
+    label_mapping = {0: "negative", 1: "neutral", 2: "positive"}
+    return label_mapping.get(predicted_class, "unknown")
+
+# Creating the Gradio interface.
+iface = gr.Interface(
+    fn=predict_sentiment,
+    inputs=gr.Textbox(lines=4, placeholder="Enter an Urdu tweet here...", label="Urdu Tweet"),
+    outputs=gr.Textbox(label="Predicted Sentiment"),
+    title="Urdu Tweet Sentiment Analysis",
+    description="This app uses a fine-tuned transformer model to predict the sentiment of Urdu tweets. "
+                "Enter your tweet in the textbox below and click 'Submit' to see the prediction.",
+    examples=[
+        ["السلام علیکم! آج کا دن بہت خوبصورت ہے۔"],  # "Peace be upon you! Today is a very beautiful day."
+        ["میں بہت غمگین ہوں، دل بہت دکھ رہا ہے۔"],  # "I am very sad; my heart aches."
+        ["آپ کا کام بہت اچھا ہے!"]  # "Your work is very good!"
+    ]
+)
+
+# Launching the interface.
+if __name__ == "__main__":
+    iface.launch()
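
Note (an assumption, not part of this commit): for the Space to build and run app.py with the imports above, a requirements.txt next to app.py would likely need to list at least the following packages.

    gradio
    torch
    transformers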