# MedLLM-Assistant / test / test_llm.py
# (header reconstructed from file-viewer metadata: uploaded by VuvanAn,
#  commit "Upload 47 files", 09dc9d3, 320 bytes)
from ..rag_pipeline import ChatAssistant
from ..rag_pipeline import request_retrieve_prompt
def main() -> None:
    """Smoke-test the RAG chat pipeline with a sample medical query.

    Builds a ChatAssistant backed by the mistral-medium model, wraps a
    raw user query in the retrieval prompt template, sends it, and prints
    the assistant's response. Requires live access to the Mistral service.
    """
    # Positional args: model name, provider — as defined by ChatAssistant.
    assistant = ChatAssistant("mistral-medium", "mistral")

    # Keep the raw query and the formatted prompt in separate names —
    # the original reused one variable for both, which obscured intent.
    user_query = "Beta blocker for hypertension"
    prompt = request_retrieve_prompt.format(conversation=user_query, role="customer")

    response = assistant.get_response(user=prompt)
    print(response)


# Guard the entry point so importing this module does not fire a live
# API call and print as a side effect.
if __name__ == "__main__":
    main()