# aura-mind-glow / api_server.py
# (Hugging Face Spaces page residue: surfiniaburger's "update", commit f733284 —
#  kept as a comment so the file remains valid Python.)
# ==============================================================================
# Aura Mind Glow - Main Server (FastAPI + Gradio)
# ==============================================================================
"""
This script is the main entry point for the application. It launches a FastAPI
server that provides the diagnosis API and also serves the entire Gradio UI.
To run this server for development:
1. Make sure you have installed all packages from requirements.txt.
2. Run the command: uvicorn api_server:app --host 127.0.0.1 --port 7860
When deployed to Hugging Face Spaces, the Procfile will handle this command.
"""
# --- Essential Imports ---
import io
import os
import re
import tempfile
import warnings

import gradio as gr
from fastapi import FastAPI, File, HTTPException, UploadFile
from fastapi.responses import JSONResponse
from PIL import Image

# --- Import Core Components from Modules ---
# This setup is now shared between the API and the Gradio App
from agent_setup import initialize_adk
from bigquery_search import search_bigquery_for_remedy
from knowledge_base import KnowledgeBase
from vector_store import embed_and_store_documents, search_documents
from vision_model import load_vision_model
# --- Import the Gradio UI from app.py ---
# We import the 'demo' object directly. The app.py script should not call demo.launch()
try:
    # app.py must expose a Gradio Blocks object named 'demo' and must not
    # call .launch() itself — this server mounts the UI later.
    from app import demo as gradio_app
except ImportError as import_error:
    gradio_app = None
    print(f"❌ CRITICAL: Could not import Gradio UI from app.py: {import_error}")
    print("Ensure app.py defines a Gradio Blocks object named 'demo' and does not call .launch().")
else:
    print("βœ… Gradio UI imported successfully from app.py.")
print("βœ… All server libraries imported successfully.")
# --- Global Initialization ---
# One-time, order-dependent startup: model load -> knowledge base ->
# vector store population -> agent/tool initialization.
# Silence library deprecation noise in the server logs.
warnings.filterwarnings("ignore")
# Disable torch.compile; presumably avoids compile warm-up or incompatibility
# on the deployment hardware — TODO confirm.
os.environ["TORCH_COMPILE_DISABLE"] = "1"
print("Performing initial setup for server (this may take a moment)...")
# Load the vision model and its input processor once so every request reuses them.
VISION_MODEL, PROCESSOR = load_vision_model()
KB = KnowledgeBase()
# NOTE(review): the KnowledgeBase instance itself is passed as the retriever —
# confirm initialize_adk expects this object rather than a dedicated retriever type.
RETRIEVER = KB
# Populate the vector store before the agent is initialized.
embed_and_store_documents()
adk_components = initialize_adk(VISION_MODEL, PROCESSOR, RETRIEVER)
# initialize_adk may return a falsy value on failure; guard before indexing.
DIAGNOSIS_TOOL = adk_components["diagnosis_tool"] if adk_components else None
if not DIAGNOSIS_TOOL:
    print("❌ CRITICAL: Diagnosis tool could not be initialized. The API will not work.")
print("βœ… Server setup complete.")
# --- FastAPI App and Endpoint Logic ---
# Single FastAPI application: it exposes the JSON diagnosis API and, further
# below, also has the Gradio UI mounted onto it — one process serves both.
app = FastAPI(
    title="Aura Mind Glow API",
    description="Provides access to the plant diagnosis model and serves the Gradio UI.",
    version="1.0.0",
)
def run_diagnosis_logic(image: Image.Image):
    """Run the diagnosis tool on *image* and gather local and cloud remedies.

    Args:
        image: The plant photo to diagnose.

    Returns:
        On success, a dict with keys ``diagnosis``, ``remedy_local`` and
        ``remedy_cloud``; on failure, a dict with a single ``error`` key.
    """
    if DIAGNOSIS_TOOL is None:
        # Startup already warned that initialization failed; fail gracefully
        # here instead of raising TypeError on calling None below.
        return {"error": "Diagnosis tool is not initialized."}
    temp_file_path = None
    try:
        # The diagnosis tool expects a file path, so persist the image to a
        # temporary PNG for the duration of the call.
        with tempfile.NamedTemporaryFile(delete=False, suffix=".png") as temp_file:
            image.save(temp_file.name)
            temp_file_path = temp_file.name
        diagnosis = DIAGNOSIS_TOOL(temp_file_path)
        if "Could not parse" in diagnosis:
            return {"error": f"Could not identify condition: {diagnosis}"}
        # Keep only word characters, whitespace and . , " - before using the
        # text as a search query.  (The previous pattern contained the
        # reversed character range backslash-to-comma and raised re.error
        # the first time this function ran.)
        cleaned_diagnosis = re.sub(r'[^\w\s.,"\-]', '', diagnosis)
        cleaned_diagnosis = re.sub(r'\s+', ' ', cleaned_diagnosis).strip()
        # NOTE(review): search_documents was previously an undefined name here
        # (NameError); it is now imported from vector_store — confirm that
        # module exports it.
        local_remedy_list = search_documents(cleaned_diagnosis)
        local_remedy = local_remedy_list[0] if local_remedy_list else "No remedy found in local knowledge base."
        # Map the free-text diagnosis onto one of the known BigQuery topics.
        lowered = cleaned_diagnosis.lower()
        if "healthy" in lowered:
            search_query = "healthy maize"
        elif "phosphorus" in lowered:
            search_query = "phosphorus"
        else:
            search_query = "general"
        cloud_remedy = search_bigquery_for_remedy(search_query)
        return {
            "diagnosis": diagnosis,
            "remedy_local": local_remedy,
            "remedy_cloud": cloud_remedy,
        }
    finally:
        # Always clean up the temporary image, even if diagnosis raised; guard
        # against the save itself having failed before the file existed.
        if temp_file_path and os.path.exists(temp_file_path):
            os.remove(temp_file_path)
@app.post("/diagnose/", tags=["Diagnosis"])
async def diagnose_endpoint(file: UploadFile = File(...)):
    """
    Receives an image file, performs diagnosis, and returns the result as JSON.

    Raises:
        HTTPException: 400 if the upload is not an image; 500 if the diagnosis
            pipeline reports an error or raises unexpectedly.
    """
    # content_type may be None on a malformed upload; treat that as non-image
    # rather than crashing with AttributeError.
    if not file.content_type or not file.content_type.startswith('image/'):
        raise HTTPException(status_code=400, detail="File provided is not an image.")
    try:
        image_bytes = await file.read()
        image = Image.open(io.BytesIO(image_bytes))
        result = run_diagnosis_logic(image)
        if "error" in result:
            raise HTTPException(status_code=500, detail=result["error"])
        return JSONResponse(content=result)
    except HTTPException:
        # Propagate deliberate HTTP errors unchanged.  Previously the broad
        # handler below caught the 500 raised above and re-raised it with its
        # detail nested inside a generic message.
        raise
    except Exception as e:
        print(f"❌ API Error: {e}")
        raise HTTPException(status_code=500, detail=f"An internal server error occurred: {e}") from e
# --- Mount the Gradio App ---
# Mount the UI at the site root only if app.py imported successfully above.
# API routes registered earlier (e.g. /diagnose/) are presumably matched before
# the root mount — verify against FastAPI routing order.
if gradio_app:
    app = gr.mount_gradio_app(app, gradio_app, path="/")
    print("βœ… Gradio UI has been mounted on the FastAPI server at the root path '/'.")
# Note: The 'if __name__ == "__main__":' block with uvicorn.run() is removed.
# The Procfile will be used by Hugging Face to run the server.