feat: debugging
utils/llm_caller.py  CHANGED  (+2 -1)
@@ -155,6 +155,7 @@ class LLMCaller:
 Interests: {', '.join(plan_request.interests) if plan_request.interests else 'Sightseeing'}
 Transport: {plan_request.transportPref or 'Any'} | Stay: {plan_request.stayPref or 'Any'}
 Dates: {plan_request.travelDates or 'Flexible'}
+*Provide a latitude and longitude for each place in timeline and spots.*.
 
 Context: {context_text[:4000]}{"..." if len(context_text) > 4000 else ""}
 
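The added prompt line asks the model to return a latitude and longitude for every place in timeline and spots. The response schema itself is not part of this diff, so the snippet below is only a rough sketch of the shape that instruction implies; every field name in it (timeline, spots, lat, lng) is an assumption, not the project's actual schema.

    # Illustrative only: one possible shape the new prompt instruction could produce.
    # All keys here are assumed for the example, not taken from the project.
    example_plan = {
        "timeline": [
            {"day": 1, "place": "City Museum", "lat": 48.8606, "lng": 2.3376},
        ],
        "spots": [
            {"name": "Old Town Square", "lat": 50.0875, "lng": 14.4213},
        ],
    }

    # One way to sanity-check that every place carries coordinates after parsing:
    for item in example_plan["timeline"] + example_plan["spots"]:
        assert "lat" in item and "lng" in item, f"missing coordinates: {item}"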
@@ -199,7 +200,7 @@ class LLMCaller:
 
 # 6. Call LLM to generate structured trip plan
 llm_response = await self.basic_query(user_prompt=llm_prompt, max_tokens=24048)
-
+print(f"LLM Response: {llm_response}")
 
 # 7. Parse LLM response as JSON
 try:
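The second hunk replaces a blank line with a temporary print of the raw LLM output just before the JSON parse in step 7. If that output needs to stay visible beyond debugging, a logging call keeps it controllable per environment; the sketch below assumes llm_response is the plain string returned by self.basic_query(...) and uses trip_plan as an illustrative variable name, not one taken from the project.

    import json
    import logging

    logger = logging.getLogger(__name__)

    # Stand-in for the string returned by self.basic_query(...); in the real
    # code this is the awaited result shown in the diff above.
    llm_response = '{"timeline": [], "spots": []}'

    # Debug-level log instead of a bare print, so it can be silenced in production.
    logger.debug("LLM Response: %s", llm_response)

    # 7. Parse the LLM response as JSON with an explicit failure path.
    try:
        trip_plan = json.loads(llm_response)
    except json.JSONDecodeError as exc:
        logger.error("LLM returned non-JSON output: %s", exc)
        trip_plan = None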