# NOTE: removed Hugging Face Spaces page-scrape artifacts (UI status text,
# file size, commit hash, and a line-number gutter) that were not part of
# the module and made the file invalid Python.
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from server.routes.threats import router as threats_router
from server.routes.models import router as models_router
from server.core.ml_manager import MLManager
import os
from dotenv import load_dotenv
import logging
# Configure root logging once at import time; the module logger follows the
# stdlib convention of logging.getLogger(__name__).
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# Load environment variables from a local .env file (no-op if absent).
load_dotenv()
# Initialize the ML model manager eagerly at import time so models are ready
# before the first request. NOTE(review): MLManager is defined elsewhere --
# confirm construction here performs the model loading and is safe at import.
ml_manager = MLManager()
# FastAPI application instance; interactive docs are served at /docs and the
# schema at /openapi.json.
app = FastAPI(
    title="SafeSpace AI API",
    description="AI-powered threat detection and safety analysis",
    version="2.0.0"
)
# Expose the ML manager on app.state so route handlers can reach it via
# request.app.state.ml_manager (dependency injection).
app.state.ml_manager = ml_manager
# Configure CORS for Hugging Face Spaces.
# NOTE: "*" already allows every origin; the explicit localhost entries are
# kept to document the expected local callers. Wildcard *patterns* such as
# "https://*.hf.space" are NOT supported inside allow_origins -- Starlette
# compares those strings literally, so that entry could never match -- HF
# Spaces subdomains are therefore matched via allow_origin_regex instead.
app.add_middleware(
    CORSMiddleware,
    allow_origins=[
        "*",                      # Allow all origins for HF Spaces
        "http://localhost:3000",  # Local React app
        "http://localhost:3001",  # Local Node.js backend
        "http://127.0.0.1:3000",
        "http://127.0.0.1:3001",
    ],
    allow_origin_regex=r"https://.*\.hf\.space",  # HF Spaces domains
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# Mount the API routers under their /api prefixes.
app.include_router(threats_router, prefix="/api/threats", tags=["threats"])
app.include_router(models_router, prefix="/api/models", tags=["models"])
@app.get("/")
async def root():
    """Landing endpoint: report API status, model state, and available routes."""
    endpoint_map = {
        "health": "/health",
        "analyze_threat": "/api/threats/analyze",
        "model_status": "/api/models/status",
        "documentation": "/docs",
        "openapi": "/openapi.json",
    }
    payload = {
        "message": "SafeSpace AI API is running on Hugging Face Spaces",
        "version": "2.0.0",
        "models_status": ml_manager.get_status(),
        "endpoints": endpoint_map,
        "usage": "Visit /docs for interactive API documentation",
    }
    return payload
@app.get("/health")
async def health_check():
    """Lightweight liveness probe: report service health and model-load state."""
    status_payload = {
        "status": "healthy",
        "message": "SafeSpace AI API is operational",
        "models_loaded": ml_manager.models_loaded,
    }
    return status_payload
# NOTE: app.state.ml_manager is assigned once, immediately after the app is
# created above; the duplicate assignment that used to live here was removed.
if __name__ == "__main__":
    import uvicorn

    # Hugging Face Spaces serves on port 7860 unless PORT overrides it.
    listen_port = int(os.environ.get("PORT", 7860))
    uvicorn.run(app, host="0.0.0.0", port=listen_port)