Update app.py
app.py CHANGED
@@ -46,13 +46,14 @@ def get_pipeline():
 
 
 # ✅ AI Function - Processes and ranks workouts
-
 def analyze_workouts(last_workouts: str):
     pipe = get_pipeline()
     if pipe is None:
-
+        print("❌ AI model is not loaded.")
+        return "❌ AI model not loaded."
 
     if not last_workouts.strip():
+        print("❌ Empty workout data received!")
         return "❌ No workout data provided."
 
     instruction = (
@@ -60,7 +61,7 @@ def analyze_workouts(last_workouts: str):
         f"\n\n{last_workouts}\n\nOnly return rankings. No extra text."
     )
 
-    print(f"📨 Sending prompt to AI: {instruction}")
+    print(f"📨 Sending prompt to AI: {instruction}")
 
     try:
         result = pipe(instruction, max_new_tokens=200, temperature=0.3, top_p=0.9)
@@ -69,11 +70,11 @@ def analyze_workouts(last_workouts: str):
             return "❌ AI did not return a valid response."
 
         response_text = result[0]["generated_text"].strip()
-        print(f"📩 AI Response: {response_text}")
+        print(f"📩 AI Response: {response_text}")
 
         return response_text
     except Exception as e:
-        print(f"❌ AI Error: {str(e)}")
+        print(f"❌ AI Error: {str(e)}")
         return f"❌ Error: {str(e)}"
 
 
@@ -93,20 +94,13 @@ async def process_workout_request(request: Request):
 
         response_text = analyze_workouts(last_workouts)
 
-
-
-
-
-
-        try:
-            webhook_response = await client.post(WEBHOOK_URL, json={"event_id": event_id, "data": [response_text]})
-            webhook_response.raise_for_status()
-            print(f"✅ Webhook sent successfully: {webhook_response.json()}")
-        except Exception as e:
-            print(f"⚠️ Webhook failed: {e}")
-            print("🔄 Switching to Polling Mode...")
+        if response_text and response_text.strip():
+            event_store[event_id] = response_text
+            print(f"📤 Stored event: {event_id} → {response_text}")
+        else:
+            print("❌ AI did not generate a valid response. Not storing event.")
 
-        return {"event_id": event_id}
+        return {"event_id": event_id}
 
     except Exception as e:
         print(f"❌ Error processing request: {e}")
@@ -117,8 +111,13 @@ async def process_workout_request(request: Request):
 @app.get("/gradio_api/poll/{event_id}")
 async def poll(event_id: str):
     """Fetches stored AI response for a given event ID."""
+    print(f"🔍 Polling event ID: {event_id}")
+
     if event_id in event_store:
+        print(f"✅ Returning stored response: {event_store[event_id]}")
         return {"data": [event_store.pop(event_id)]}
+
+    print("❌ Event ID not found in event_store")
     return {"detail": "Not Found"}
 
 
@@ -128,25 +127,6 @@ async def root():
     return {"message": "Workout Analysis & Ranking AI is running!"}
 
 
-# ✅
-iface = gr.Interface(
-    fn=analyze_workouts,
-    inputs="text",
-    outputs="text",
-    title="Workout Analysis & Ranking AI",
-    description="Enter workout data to analyze effectiveness, rank workouts, and receive improvement recommendations."
-)
-
-
-# ✅ Start Both FastAPI & Gradio
-def start_gradio():
-    iface.launch(server_name="0.0.0.0", server_port=7860, share=True)
-
-def start_fastapi():
-    uvicorn.run(app, host="0.0.0.0", port=7861)
-
-# ✅ Run both servers in parallel
+# ✅ Start FastAPI
 if __name__ == "__main__":
-
-    threading.Thread(target=start_gradio).start()
-    threading.Thread(target=start_fastapi).start()
+    uvicorn.run(app, host="0.0.0.0", port=7861)
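With the webhook push removed, callers are expected to submit their workout data, keep the returned event_id, and poll /gradio_api/poll/{event_id} until the stored response shows up. Only the poll route and its {"data": [...]} / {"detail": "Not Found"} shapes come from the diff above; the submit path and payload used below (/gradio_api/call/analyze_workouts with a Gradio-style {"data": [...]} body) and the choice of httpx are assumptions, so treat this as a rough client sketch rather than the Space's documented API.

import time
import httpx  # assumed HTTP client; any client with POST/GET works

BASE_URL = "https://your-space.hf.space"  # placeholder Space URL

def analyze_and_wait(last_workouts: str, timeout: float = 60.0) -> str:
    # Submit step: route and payload are assumptions, adjust to the real app.
    submit = httpx.post(
        f"{BASE_URL}/gradio_api/call/analyze_workouts",
        json={"data": [last_workouts]},
        timeout=30,
    )
    submit.raise_for_status()
    event_id = submit.json()["event_id"]  # the handler returns {"event_id": ...}

    # Poll step: matches the /gradio_api/poll/{event_id} route in the diff.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        body = httpx.get(f"{BASE_URL}/gradio_api/poll/{event_id}", timeout=30).json()
        if "data" in body:
            return body["data"][0]
        time.sleep(1)  # response not stored yet; try again
    raise TimeoutError(f"No response for event {event_id}")

A one-second sleep between polls keeps the loop simple; an exponential backoff would be kinder to the Space if responses can take longer.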