# NOTE(review): the lines below were web-page scraping artifacts (a "Spaces"
# banner, file-size line, commit hashes, and a column ruler) that made this
# module unparseable as Python. They are preserved here as comments.
# Spaces: Running / Running
# File size: 2,347 Bytes
# 3ec35ef 9150f8e 50553ea 9150f8e 3ec35ef 9150f8e 3ec35ef 9150f8e 3ec35ef 9150f8e 3ec35ef 9150f8e 50553ea 9150f8e 3ec35ef 9150f8e 3ec35ef 9150f8e
import json
from trauma.api.chat.dto import EntityData
from trauma.api.message.ai.prompts import TraumaPrompts
from trauma.core.config import settings
from trauma.core.wrappers import openai_wrapper
@openai_wrapper(is_json=True)
async def update_entity_data_with_ai(entity_data: EntityData, user_message: str, assistant_message: str):
    """Build the system-only message list asking the model to update entity data.

    The prompt template's placeholders are filled with the current entity
    state (as pretty-printed JSON) and the latest conversation turn.
    """
    # Substitute in the same order as the original chained .replace() calls:
    # an earlier-substituted value could itself contain a later placeholder.
    prompt = TraumaPrompts.update_entity_data_with_ai
    for placeholder, value in (
        ("{entity_data}", entity_data.model_dump_json(indent=2)),
        ("{assistant_message}", assistant_message),
        ("{user_message}", user_message),
    ):
        prompt = prompt.replace(placeholder, value)
    return [{"role": "system", "content": prompt}]
@openai_wrapper(temperature=0.8)
async def generate_next_question(empty_field: str, instructions: str, user_message: str, message_history: list[dict]):
    """Compose the chat transcript used to ask the next clarifying question.

    Layout: system prompt (with the field to fill and its instructions
    substituted in), then the prior message history, then the user's
    latest message.
    """
    system_prompt = (
        TraumaPrompts.generate_next_question
        .replace("{empty_field}", empty_field)
        .replace("{instructions}", instructions)
    )
    transcript = [{"role": "system", "content": system_prompt}]
    transcript.extend(message_history)
    transcript.append({"role": "user", "content": user_message})
    return transcript
@openai_wrapper(temperature=0.4)
async def generate_search_request(user_messages_str: str, entity_data: dict):
    """Build the system message that asks the model to produce a search request.

    The entity data dict is serialized to pretty-printed JSON before being
    substituted into the prompt template.
    """
    entity_json = json.dumps(entity_data, indent=2)
    content = TraumaPrompts.generate_search_request
    content = content.replace("{entity_data}", entity_json)
    content = content.replace("{user_messages_str}", user_messages_str)
    return [{"role": "system", "content": content}]
@openai_wrapper(temperature=0.4)
async def generate_final_response(final_entities: str, user_message: str, message_history: list[dict]):
    """Compose the transcript for producing the final recommendation decision.

    Layout: system prompt (with the found entities substituted in), then the
    prior message history, then the user's latest message.
    """
    system_message = {
        "role": "system",
        "content": TraumaPrompts.generate_recommendation_decision.replace(
            "{final_entities}", final_entities
        ),
    }
    return [system_message, *message_history, {"role": "user", "content": user_message}]
async def convert_value_to_embeddings(value: str) -> list[float]:
    """Embed *value* using OpenAI's ``text-embedding-3-large`` model.

    Args:
        value: The text to embed.

    Returns:
        The 1536-dimensional embedding vector for the single input string.
    """
    embeddings = await settings.OPENAI_CLIENT.embeddings.create(
        input=value,
        model='text-embedding-3-large',
        dimensions=1536,
    )
    # One input string -> one embedding object in the response.
    # (A stray trailing "|" — a copy/paste artifact that made this line a
    # syntax error — has been removed from the original return statement.)
    return embeddings.data[0].embedding