"""FastAPI service exposing an LLM-as-evaluator endpoint (POST /evaluate)."""

import logging

from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel

from llmeval import LLM_as_Evaluator

logger = logging.getLogger(__name__)

app = FastAPI()

# CORS configuration.
# NOTE(review): wildcard origins combined with allow_credentials=True is
# rejected by browsers for credentialed requests and is unsafe in
# production — specify concrete domains there (as the original comment said).
origins = ["*"]  # Allow all origins; specify domains in production

app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,      # Allows all origins
    allow_credentials=True,
    allow_methods=["*"],        # Allows all HTTP methods
    allow_headers=["*"],        # Allows all headers
)

# Single shared evaluator instance, created once at module import.
le = LLM_as_Evaluator()


class EvalInput(BaseModel):
    """Request body for /evaluate."""

    # Prompt-version identifier passed through to the evaluator
    # (presumably "name:version" — the removed dead code split on ":").
    promptversion: str


@app.post("/evaluate")
async def evaluation(request: EvalInput):
    """Run the LLM evaluator for the requested prompt version.

    Returns a JSON body ``{"evalsuccessfull": bool}``. HTTP status is 200
    in both the success and failure case to preserve the existing client
    contract (clients inspect the flag, not the status code).
    """
    prompt_version = request.promptversion
    try:
        # Only the evaluator call can raise; keep the try body minimal.
        le.LLM_Evaluator(prompt_version)
    except Exception:
        # Log the failure instead of swallowing it silently; keep the
        # original 200-with-flag response for backward compatibility.
        logger.exception(
            "Evaluation failed for prompt version %r", prompt_version
        )
        return JSONResponse(content={"evalsuccessfull": False}, status_code=200)
    return JSONResponse(content={"evalsuccessfull": True}, status_code=200)