# apikittycara / app.py
import os

from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from openai import OpenAI

# Initialize the OpenAI-compatible client for NVIDIA's API endpoint.
# The key is read from the environment instead of being hardcoded, so the
# secret never lands in version control.
client = OpenAI(
    base_url="https://integrate.api.nvidia.com/v1",
    api_key=os.environ["NVIDIA_API_KEY"],
)
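# Export the key before launching, e.g. (the variable name NVIDIA_API_KEY is
# an assumption chosen here, not an NVIDIA requirement):
#   export NVIDIA_API_KEY="nvapi-..."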
# Define the system prompt.
DEFAULT_PROMPT2 = """You are Kittycara, a friendly AI assistant designed to help adolescent girls and their caretakers understand menstrual health.
Your goal is to provide support, information, and potential diagnoses based on the symptoms provided. Remember to be sensitive, supportive, and
encourage seeking professional medical advice when necessary. Always maintain a friendly and approachable tone, as if you were a caring pet cat.
Always explain medical terms in a way that is easy to understand. For example, if you mention "menstruation," explain it as 'the monthly bleeding women experience as part of their reproductive cycle.'
If asked about topics outside of menstrual health or medical information, politely state that you're not able to discuss those subjects
and redirect the conversation to menstrual health concerns. Always encourage seeking professional medical advice for specific diagnoses or treatments."""
# Symptoms the assistant is meant to cover. They are not referenced by any
# endpoint yet; the helper sketch below shows one possible use.
SYMPTOMS = [
    "Heavy bleeding", "Irregular periods", "Painful periods", "Missed periods",
    "Spotting between periods", "Mood swings", "Fatigue", "Abdominal pain",
    "Nausea", "Headaches", "Breast tenderness", "Acne",
]
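# A minimal sketch of how SYMPTOMS could be folded into the user's message.
# It is hypothetical: build_symptom_message is not part of the original app
# and is not wired into any endpoint.
def build_symptom_message(message: str, reported: list[str]) -> str:
    """Prepend the user's reported symptoms (validated against SYMPTOMS) to their message."""
    known = [s for s in reported if s in SYMPTOMS]
    if not known:
        return message
    return f"Reported symptoms: {', '.join(known)}.\n\n{message}"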
# Initialize the FastAPI application.
app = FastAPI()

class RequestData(BaseModel):
    message: str
@app.get("/")
async def home():
    """Health-check route."""
    return "kitty"
@app.post("/analyze")
def predict(request: RequestData):
    """Send the user's message to the model and return the full response."""
    # Build the chat message list: system prompt first, then the user's message.
    messages = [
        {"role": "system", "content": DEFAULT_PROMPT2},
        {"role": "user", "content": request.message},
    ]
    try:
        completion = client.chat.completions.create(
            model="meta/llama-3.1-8b-instruct",
            messages=messages,
            temperature=0.2,
            top_p=0.9,
            max_tokens=1024,
            stream=True,  # stream chunks, then assemble them server-side
        )
        # Accumulate the streamed chunks into a single response string.
        full_response = ""
        for chunk in completion:
            if chunk.choices[0].delta.content is not None:
                full_response += chunk.choices[0].delta.content
        return {"response": full_response}
    except Exception as e:
        # Surface upstream failures as a proper HTTP error rather than a
        # 200 response carrying an error body.
        raise HTTPException(status_code=500, detail=f"Error: {e}")