jocko committed on
Commit · 317f15f
1 Parent(s): 70dae1f
fix image similarity detection
src/streamlit_app.py  (+35 -36)  CHANGED
@@ -2,7 +2,7 @@
 # ✅ Cache-Safe Multimodal App
 # ================================
 
-import os
+import shutil, os
 
 # ====== Force all cache dirs to /tmp (writable in most environments) ======
 CACHE_BASE = "/tmp/cache"
@@ -13,6 +13,11 @@ os.environ["HF_DATASETS_CACHE"] = f"{CACHE_BASE}/hf_datasets"
 os.environ["TORCH_HOME"] = f"{CACHE_BASE}/torch"
 os.environ["STREAMLIT_CACHE_DIR"] = f"{CACHE_BASE}/streamlit_cache"
 os.environ["STREAMLIT_STATIC_DIR"] = f"{CACHE_BASE}/streamlit_static"
+os.environ["STREAMLIT_CONFIG_DIR"] = "/tmp/.streamlit"
+
+# Create the directories before imports
+os.makedirs(os.environ["STREAMLIT_CONFIG_DIR"], exist_ok=True)
+shutil.copyfile(".streamlit/config.toml", "/tmp/.streamlit/config.toml")
 
 # Create the directories before imports
 for path in os.environ.values():
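The copy added above assumes `.streamlit/config.toml` is present in the repository. A purely illustrative defensive variant (not part of this commit) would skip the copy when the source file is missing:

```python
# Illustrative sketch, not from the commit: copy the Streamlit config into the
# writable /tmp location only if the source file actually exists.
import os
import shutil

CONFIG_SRC = ".streamlit/config.toml"       # repo-relative path used in the diff
CONFIG_DST = "/tmp/.streamlit/config.toml"  # writable destination used in the diff

os.makedirs(os.path.dirname(CONFIG_DST), exist_ok=True)
if os.path.exists(CONFIG_SRC):
    shutil.copyfile(CONFIG_SRC, CONFIG_DST)
```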
@@ -30,13 +35,6 @@ import openai
 import comet_llm
 from opik import track
 
-os.environ["STREAMLIT_CONFIG_DIR"] = "/tmp/.streamlit"
-os.environ["STREAMLIT_CACHE_DIR"] = f"{CACHE_BASE}/streamlit_cache"
-os.environ["STREAMLIT_STATIC_DIR"] = f"{CACHE_BASE}/streamlit_static"
-
-os.makedirs("/tmp/.streamlit", exist_ok=True)
-
-
 # ========== 🔑 API Key ==========
 openai.api_key = os.getenv("OPENAI_API_KEY")
 os.environ["OPIK_API_KEY"] = os.getenv("OPIK_API_KEY")
@@ -126,8 +124,8 @@ TEXT_COLUMN = "complaints"  # or "general_complaint", depending on your needs
 # ========== 🧑‍⚕️ App UI ==========
 st.title("🩺 Multimodal Medical Chatbot")
 
-
-
+query = st.text_input("Enter your medical question or symptom description:")
+uploaded_files = st.file_uploader("Upload an image to find similar medical cases:", type=["png", "jpg", "jpeg"], accept_multiple_files=True)
 
 @track
 def get_chat_completion_openai(client, prompt: str):
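With `accept_multiple_files=True`, `st.file_uploader` returns a list of `UploadedFile` objects (empty until the user uploads), which is why the handler in the next hunk indexes `uploaded_files[0]`. A minimal illustration, assuming `import streamlit as st`:

```python
# Minimal illustration of the widget added above; not additional app code.
uploaded_files = st.file_uploader(
    "Upload an image to find similar medical cases:",
    type=["png", "jpg", "jpeg"],
    accept_multiple_files=True,
)
if uploaded_files:                 # truthy only once at least one file is uploaded
    first = uploaded_files[0]      # the commit's handler processes only the first file
    st.write(f"received {len(uploaded_files)} file(s); processing {first.name}")
```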
@@ -150,33 +148,34 @@ def get_similar_prompt(query):
 
 
 
-
-
-
-
-#
-
-
-#
-
-
-#
-
-
-#
-
-
-
-
-
-
-
-
-if
+if query:
+    with st.spinner("Searching medical cases..."):
+
+
+        # Compute similarity
+        selected = get_similar_prompt(query)
+
+        # Show Image
+        st.image(selected['image'], caption="Most relevant medical image", use_container_width=True)
+
+        # Show Text
+        st.markdown(f"**Case Description:** {selected[TEXT_COLUMN]}")
+
+        # GPT Explanation
+        if openai.api_key:
+            prompt = f"Explain this case in plain English: {selected[TEXT_COLUMN]}"
+
+            explanation = get_chat_completion_openai(client, prompt)
+            explanation = explanation.choices[0].message.content
+
+            st.markdown(f"### 🤖 Explanation by GPT:\n{explanation}")
+        else:
+            st.warning("OpenAI API key not found. Please set OPENAI_API_KEY as a secret environment variable.")
+
+if uploaded_files:
     with st.spinner("Searching medical cases..."):
-        print(
+        print(uploaded_files)
+        uploaded_file = uploaded_files[0]
         st.write(f'uploading file {uploaded_file.name}')
         query_image = Image.open(uploaded_file).convert("RGB")
         st.image(query_image, caption="Your uploaded image", use_container_width=True)
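The new branch calls `get_similar_prompt(query)`, whose body lies outside the hunks shown here. A minimal sketch of what such a text-to-case lookup could look like, assuming a sentence-transformers model and a pre-embedded case list (every name below is illustrative, not taken from the repository):

```python
# Illustrative sketch only; the real get_similar_prompt() is not shown in this diff.
# Assumes `cases` is a list of dict-like rows (each with an 'image' and the TEXT_COLUMN
# field) and `case_embeddings` is a row-normalized numpy matrix of their text embeddings.
import numpy as np
from sentence_transformers import SentenceTransformer

embedder = SentenceTransformer("all-MiniLM-L6-v2")  # assumed model choice

def get_similar_prompt_sketch(query, cases, case_embeddings):
    query_vec = embedder.encode([query], normalize_embeddings=True)[0]
    scores = case_embeddings @ query_vec   # cosine similarity on normalized vectors
    return cases[int(np.argmax(scores))]
```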
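The uploaded-image branch is cut off by the hunk boundary before the similarity lookup itself, which is the part the commit message says it fixes. A sketch of how an image-to-image search could be wired, assuming CLIP embeddings (the model choice and the `case_image_embeddings` matrix are assumptions, not identifiers from the repository):

```python
# Illustrative sketch only: image similarity via CLIP embeddings and cosine similarity.
# Assumes `cases` is a list of dataset rows and `case_image_embeddings` is a
# row-normalized torch tensor of their pre-computed image embeddings.
import torch
from transformers import CLIPModel, CLIPProcessor

clip_model = CLIPModel.from_pretrained("openai/clip-vit-base-patch32")
clip_processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32")

def most_similar_case_sketch(query_image, cases, case_image_embeddings):
    inputs = clip_processor(images=query_image, return_tensors="pt")
    with torch.no_grad():
        query_emb = clip_model.get_image_features(**inputs)
    query_emb = query_emb / query_emb.norm(dim=-1, keepdim=True)
    scores = case_image_embeddings @ query_emb.squeeze(0)  # cosine similarity
    return cases[int(torch.argmax(scores))]
```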