from fastapi import FastAPI
from transformers import BertForSequenceClassification, AutoTokenizer
import torch
import os

MODEL_PATH = "hariharan220/finbert-stock-sentiment"

# ✅ Load Hugging Face API token from environment variable
HF_API_TOKEN = os.getenv("HF_API_KEY")  # Change from HF_API_TOKEN to HF_API_KEY

if not HF_API_TOKEN:
    raise ValueError("❌ ERROR: Hugging Face API Key (HF_API_KEY) is missing. Set it in your environment variables.")

# ✅ Authenticate when loading the model
model = BertForSequenceClassification.from_pretrained(
    MODEL_PATH,
    use_auth_token=HF_API_TOKEN
)
tokenizer = AutoTokenizer.from_pretrained(
    MODEL_PATH,
    use_auth_token=HF_API_TOKEN
)

# ✅ Define sentiment labels
labels = ["Negative", "Neutral", "Positive"]

# ✅ Create FastAPI app
app = FastAPI()
# Root endpoint (route decorator restored; the "/" path is assumed)
@app.get("/")
async def home():
    return {"message": "Stock Sentiment Analysis API is running!"}
# Prediction endpoint (route decorator restored; the "/predict" path is assumed)
@app.post("/predict")
async def predict_sentiment(text: str):
    """Predicts sentiment of stock-related text using FinBERT"""
    inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
    with torch.no_grad():
        outputs = model(**inputs)
    logits = outputs.logits
    prediction = torch.argmax(logits, dim=1).item()
    sentiment = labels[prediction]
    return {"text": text, "sentiment": sentiment}