from fastapi import APIRouter, HTTPException, Depends
from typing import Dict, Any, List
import logging

from app.services.models import ModelService
from app.api.dependencies import get_model_service
from app.core.fastapi_cache import cached
from app.utils.logging import LogFormatter

logger = logging.getLogger(__name__)
router = APIRouter(tags=["models"])

@router.get("/status")
@cached(expire=300)
async def get_models_status(
model_service: ModelService = Depends(get_model_service)
) -> Dict[str, List[Dict[str, Any]]]:
"""Get all models grouped by status"""
try:
logger.info(LogFormatter.info("Fetching status for all models"))
result = await model_service.get_models()
stats = {
status: len(models) for status, models in result.items()
}
for line in LogFormatter.stats(stats, "Models by Status"):
logger.info(line)
return result
except Exception as e:
logger.error(LogFormatter.error("Failed to get models status", e))
raise HTTPException(status_code=500, detail=str(e))
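# Note: the endpoint above returns models grouped by status. Illustrative shape
# only ("pending" is used below; any other status keys are not shown in this file):
#
#     {"pending": [ {...}, ... ], "finished": [ {...}, ... ]}
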
@router.get("/pending")
@cached(expire=60)
async def get_pending_models(
model_service: ModelService = Depends(get_model_service)
) -> List[Dict[str, Any]]:
"""Get all models waiting for evaluation"""
try:
logger.info(LogFormatter.info("Fetching pending models"))
models = await model_service.get_models()
pending = models.get("pending", [])
logger.info(LogFormatter.success(f"Found {len(pending)} pending models"))
return pending
except Exception as e:
logger.error(LogFormatter.error("Failed to get pending models", e))
raise HTTPException(status_code=500, detail=str(e))
@router.post("/submit")
async def submit_model(
    model_data: Dict[str, Any],
    model_service: ModelService = Depends(get_model_service)
) -> Dict[str, Any]:
    """Submit a model for evaluation; expects a 'user_id' alongside the model fields"""
    try:
        logger.info(LogFormatter.section("MODEL SUBMISSION"))

        user_id = model_data.pop('user_id', None)
        if not user_id:
            error_msg = "user_id is required"
            logger.error(LogFormatter.error("Validation failed", error_msg))
            raise ValueError(error_msg)

        # Log submission details
        submission_info = {
            "Model_ID": model_data.get("model_id"),
            "User": user_id,
            "Base_Model": model_data.get("base_model"),
            "Precision": model_data.get("precision"),
            "Model_Type": model_data.get("model_type")
        }
        for line in LogFormatter.tree(submission_info, "Submission Details"):
            logger.info(line)

        result = await model_service.submit_model(model_data, user_id)
        logger.info(LogFormatter.success("Model submitted successfully"))
        return result

    except ValueError as e:
        logger.error(LogFormatter.error("Invalid submission data", e))
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error(LogFormatter.error("Submission failed", e))
        raise HTTPException(status_code=500, detail=str(e))

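# Illustrative request body for POST /submit (field names follow the reads in
# submit_model above; the concrete values here are made up):
#
#     {
#         "model_id": "org/model-name",
#         "user_id": "some-user",
#         "base_model": "org/base-model",
#         "precision": "float16",
#         "model_type": "fine-tuned"
#     }
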
@router.get("/{model_id}/status")
async def get_model_status(
model_id: str,
model_service: ModelService = Depends(get_model_service)
) -> Dict[str, Any]:
try:
logger.info(LogFormatter.info(f"Checking status for model: {model_id}"))
status = await model_service.get_model_status(model_id)
if status["status"] != "not_found":
logger.info(LogFormatter.success("Status found"))
for line in LogFormatter.tree(status, "Model Status"):
logger.info(line)
else:
logger.warning(LogFormatter.warning(f"No status found for model: {model_id}"))
return status
except Exception as e:
logger.error(LogFormatter.error("Failed to get model status", e))
raise HTTPException(status_code=500, detail=str(e)) |
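
# Minimal wiring sketch (assumption: the application mounts this router under a
# "/models" prefix; the real app factory is not part of this file):
#
#     from fastapi import FastAPI
#     app = FastAPI()
#     app.include_router(router, prefix="/models")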