from fastapi import FastAPI
from transformers import BertForSequenceClassification, AutoTokenizer
import torch
import os

MODEL_PATH = "hariharan220/finbert-stock-sentiment"

# ✅ Load the Hugging Face API token from the HF_API_KEY environment variable
HF_API_TOKEN = os.getenv("HF_API_KEY")

if not HF_API_TOKEN:
    raise ValueError("❌ ERROR: Hugging Face API key (HF_API_KEY) is missing. Set it in your environment variables.")

# ✅ Authenticate when loading the model and tokenizer
# (note: use_auth_token is deprecated in newer transformers releases, which accept token= instead)
model = BertForSequenceClassification.from_pretrained(
    MODEL_PATH, use_auth_token=HF_API_TOKEN
)
tokenizer = AutoTokenizer.from_pretrained(
    MODEL_PATH, use_auth_token=HF_API_TOKEN
)

# ✅ Define sentiment labels (order must match the model's output classes)
labels = ["Negative", "Neutral", "Positive"]

# ✅ Create the FastAPI app
app = FastAPI()


@app.get("/")
async def home():
    return {"message": "Stock Sentiment Analysis API is running!"}


@app.post("/predict")
async def predict_sentiment(text: str):
    """Predicts the sentiment of stock-related text using FinBERT.

    `text` is received as a query parameter (e.g., POST /predict?text=...).
    """
    inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
    with torch.no_grad():
        outputs = model(**inputs)
    logits = outputs.logits
    prediction = torch.argmax(logits, dim=1).item()
    sentiment = labels[prediction]
    return {"text": text, "sentiment": sentiment}
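

# --- Usage sketch (assumption: this file is saved as main.py; adjust the module path if yours differs) ---
# Start the server with uvicorn and call the endpoints over HTTP, e.g.:
#   uvicorn main:app --host 0.0.0.0 --port 8000
#   curl -X POST "http://localhost:8000/predict?text=Shares+jumped+after+strong+earnings"
# The response has the form {"text": "...", "sentiment": "Negative" | "Neutral" | "Positive"}.
if __name__ == "__main__":
    # Optional convenience entry point so the API can also be started with `python main.py`.
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8000)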