import os

# Point the Hugging Face and Matplotlib cache directories at writable /tmp
# paths before importing transformers/matplotlib (Spaces containers only
# allow writes under /tmp).
os.environ["HF_HOME"] = "/tmp/huggingface"
os.environ["MPLCONFIGDIR"] = "/tmp/mplconfig"
os.makedirs("/tmp/huggingface", exist_ok=True)
os.makedirs("/tmp/mplconfig", exist_ok=True)

from transformers import AutoTokenizer, AutoModelForTokenClassification
from fastapi import FastAPI, HTTPException
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
from transformers import pipeline
import matplotlib
matplotlib.use('Agg')  # Force a headless-safe backend before pyplot is imported
import matplotlib.pyplot as plt  # Safe to import now that the backend is set
import httpx
import io
import datetime
import logging

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Create FastAPI app
app = FastAPI()
# Load transformers models
try:
    # Load the NER checkpoint once and reuse it in the pipeline below,
    # so the model is not downloaded and initialized twice.
    tokenizer = AutoTokenizer.from_pretrained("dslim/bert-base-NER")
    model = AutoModelForTokenClassification.from_pretrained("dslim/bert-base-NER")
    ner_model = pipeline("ner", model=model, tokenizer=tokenizer, aggregation_strategy="simple")
    sentiment_model = pipeline("sentiment-analysis", model="ProsusAI/finbert")
    logger.info("Models loaded successfully.")
except Exception as e:
    logger.error(f"Model loading failed: {e}")
    ner_model = None
    sentiment_model = None
# Request body schema for sentiment and NER
class TextRequest(BaseModel):
    text: str

# Request body schema for chart
class CoinRequest(BaseModel):
    coin_id: str

@app.get("/")  # root health-check endpoint (route path assumed)
def home():
    return {"message": "Crypto News API is alive!"}
@app.post("/sentiment")  # route path assumed
def analyze_sentiment(req: TextRequest):
    if not sentiment_model:
        raise HTTPException(status_code=503, detail="Sentiment model not available")
    text = req.text.strip()
    if not text:
        raise HTTPException(status_code=400, detail="Text cannot be empty")
    try:
        # Crude character-level truncation as a guard against the 512-token model limit
        result = sentiment_model(text[:512])[0]
        return {
            "label": result["label"],
            "score": round(result["score"] * 100, 2)
        }
    except Exception as e:
        logger.error(f"Sentiment analysis error: {e}")
        raise HTTPException(status_code=500, detail="Sentiment analysis failed")
@app.post("/ner")  # route path assumed
def analyze_ner(req: TextRequest):
    if not ner_model:
        raise HTTPException(status_code=503, detail="NER model not available")
    text = req.text.strip()
    if not text:
        raise HTTPException(status_code=400, detail="Text cannot be empty")
    try:
        entities = ner_model(text[:512])
        # dslim/bert-base-NER emits the CoNLL-2003 entity groups: PER, ORG, LOC, MISC
        relevant = [e["word"] for e in entities if e.get("entity_group") in ["ORG", "PER", "MISC", "LOC"]]
        unique_entities = list(dict.fromkeys(relevant))[:5]  # de-duplicate, keep order, cap at 5
        return {"entities": unique_entities}
    except Exception as e:
        logger.error(f"NER analysis error: {e}")
        raise HTTPException(status_code=500, detail="NER analysis failed")
@app.post("/chart")  # route path assumed
def generate_chart(req: CoinRequest):
    coin_id = req.coin_id.strip().lower()
    logger.info(f"Generating chart for coin: {coin_id}")
    try:
        url = f"https://api.coingecko.com/api/v3/coins/{coin_id}/market_chart"
        params = {"vs_currency": "usd", "days": "7"}
        response = httpx.get(url, params=params)
        if response.status_code != 200:
            logger.error(f"CoinGecko API error: {response.text}")
            raise HTTPException(status_code=502, detail="Failed to fetch coin data from CoinGecko")
        prices = response.json()["prices"]
        timestamps, values = zip(*prices)
        # CoinGecko returns millisecond timestamps; convert them for a real time axis
        dates = [datetime.datetime.fromtimestamp(ts / 1000) for ts in timestamps]
        plt.figure(figsize=(6, 3))
        plt.plot(dates, values, color="blue")
        plt.title(f"{coin_id.capitalize()} - Last 7 Days")
        plt.xlabel("Time")
        plt.ylabel("Price (USD)")
        plt.grid(True)
        plt.xticks(rotation=45)
        plt.tight_layout()
        buffer = io.BytesIO()
        plt.savefig(buffer, format="png")
        plt.close()
        buffer.seek(0)
        return StreamingResponse(buffer, media_type="image/png")
    except HTTPException:
        # Re-raise deliberate HTTP errors (e.g. the 502 above) instead of masking them as 500
        raise
    except Exception as e:
        logger.exception(f"Chart generation error: {e}")
        raise HTTPException(status_code=500, detail="Chart generation failed")
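
# --- Local run sketch (assumption) ---
# The file above never shows how the app is launched; on a Hugging Face Space
# that is normally handled by the Space's Dockerfile or start command. The
# block below is only a minimal local-testing sketch, assuming the file is
# saved as app.py and uvicorn is installed; host, port (7860), and the route
# paths used in the example calls are assumptions, not taken from the original.
if __name__ == "__main__":
    import uvicorn

    # Serve the FastAPI app locally; 7860 is the port Spaces conventionally expose.
    uvicorn.run(app, host="0.0.0.0", port=7860)

# Example client calls against the assumed routes (using httpx, already imported):
#   httpx.post("http://localhost:7860/sentiment", json={"text": "Bitcoin rallies after ETF approval"})
#   httpx.post("http://localhost:7860/ner", json={"text": "Coinbase lists a new token"})
#   httpx.post("http://localhost:7860/chart", json={"coin_id": "bitcoin"})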