# EduConnect — app/api/userchat.py
# Provenance (from file viewer): author dtyago, commit 831a016 "Cleanup API responses"
from fastapi import APIRouter, Depends, HTTPException, Body
from ..dependencies import get_current_user
from typing import Any
router = APIRouter()
@router.post("/user/chat")
async def chat_with_llama(user_input: str = Body(..., embed=True), current_user: Any = Depends(get_current_user)):
# Implement your logic to interact with LlamaV2 LLM here.
# Example response, replace with actual chat logic
chat_response = "Hello, how can I assist you today?"
return {
"response": chat_response,
"user_id": current_user["user_id"],
"name": current_user["name"],
"role": current_user["role"]
}