import os

from flask import Flask, request, render_template_string, jsonify
from transformers import AutoModelForSequenceClassification, XLMRobertaTokenizer
import torch
# Define the Flask app
flask_app = Flask(__name__)
# Load the pre-trained model and tokenizer
MODEL_NAME = "letijo03/xlm-r-shopee"
tokenizer = XLMRobertaTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSequenceClassification.from_pretrained(MODEL_NAME)
model.eval() # Set the model to evaluation mode
def classify_sentiment(text):
    """Return the predicted class index (0=Negative, 1=Neutral, 2=Positive) for a comment."""
    inputs = tokenizer(text, return_tensors="pt", padding=True, truncation=True, max_length=512)
    with torch.no_grad():  # inference only; no gradients needed
        outputs = model(**inputs)
    prediction = torch.argmax(outputs.logits, dim=-1)
    return prediction.item()
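# Illustrative usage (assumes the checkpoint above loads and follows the 3-class mapping
# used by the /analyze route below: 0=Negative, 1=Neutral, 2=Positive):
#   classify_sentiment("The seller shipped quickly and the item matches the description.")
#   -> an integer class index in {0, 1, 2}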
# HTML template for user input: a minimal page with a form that POSTs the comment to /analyze
html_template = """
<!DOCTYPE html><html><head><title>Comment Sentiment Analysis</title></head><body>
<h1>Comment Sentiment Analysis</h1>
<form action="/analyze" method="post">
  <textarea name="comment" rows="4" cols="60"></textarea>
  <button type="submit">Analyze</button>
</form>
</body></html>
"""
@flask_app.route('/')
def index():
    return render_template_string(html_template)
@flask_app.route('/analyze', methods=['POST'])
def analyze():
    comment = request.form.get('comment')
    if not comment or comment.strip() == "":
        return jsonify({'error': 'Please provide a valid comment.'}), 400
    sentiment = classify_sentiment(comment)
    sentiment_label = "Positive" if sentiment == 2 else "Neutral" if sentiment == 1 else "Negative"
    return jsonify({'message': f'Sentiment analysis complete. The sentiment is: {sentiment_label}.'})
# Wrap the Flask app as an ASGI app so that the module-level variable 'app' is ASGI-compatible
from asgiref.wsgi import WsgiToAsgi
app = WsgiToAsgi(flask_app)
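# Example request once the server is running (hypothetical host/port; the app reads the
# PORT environment variable and defaults to 7860):
#   curl -X POST -F "comment=The item arrived quickly and works great" http://localhost:7860/analyze
# The response is JSON with either a 'message' field (predicted sentiment) or an 'error' field.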
if __name__ == '__main__':
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=int(os.environ.get("PORT", 7860)))
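# Alternatively, assuming this file is saved as app.py (hypothetical filename), the ASGI app
# can be served directly from the command line:
#   uvicorn app:app --host 0.0.0.0 --port 7860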