Deploy from GitHub main
src/ui/compi_phase3_final_dashboard.py  (CHANGED)
@@ -1367,13 +1367,23 @@ with tab_gallery:
             if Path(p).exists():
                 try:
                     df = pd.read_csv(p)
+                    if not df.empty and "filepath" in df.columns:
+                        df["source_log"] = Path(p).name
+                        frames.append(df)
                 except Exception as e:
                     st.warning(f"Failed reading {p}: {e}")
         if not frames:
             return pd.DataFrame(columns=["filepath"])
+        try:
+            combined = pd.concat(frames, ignore_index=True)
+            # Extra safety: ensure filepath column exists before deduplication
+            if "filepath" in combined.columns and len(combined) > 0:
+                return combined.drop_duplicates(subset=["filepath"])
+            else:
+                return combined
+        except Exception as e:
+            st.warning(f"Failed concatenating logs: {e}")
+            return pd.DataFrame(columns=["filepath"])

     def scan_images():
         """Scan output directory for images"""
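
Why the added guards matter: recent pandas versions raise KeyError when drop_duplicates(subset=[...]) names a column the frame does not have, which is the kind of state a partially written or hand-edited log could leave the data in. A minimal sketch with made-up data (not dashboard output) of the failure the "extra safety" check defends against:

import pandas as pd

# Illustrative only: a log frame that lost its "filepath" column,
# e.g. from a partially written CSV. Not dashboard data.
logs = pd.DataFrame({"filename": ["a.png", "a.png"]})

try:
    logs.drop_duplicates(subset=["filepath"])   # raises KeyError in recent pandas
except KeyError as exc:
    print("KeyError:", exc)

# With the guard from the change above, the frame is returned untouched
# instead of raising.
if "filepath" in logs.columns and len(logs) > 0:
    logs = logs.drop_duplicates(subset=["filepath"])
print(len(logs))  # still 2 rows; nothing dropped, nothing crashed
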

@@ -1397,7 +1407,24 @@ with tab_gallery:
     imgs_df = scan_images()
     logs_df = read_logs()
     ann_df = load_annotations()
+
+    # Safe merge with error handling - extra robust for Spaces
+    try:
+        if (not imgs_df.empty and not logs_df.empty and
+            "filepath" in logs_df.columns and "filepath" in imgs_df.columns and
+            len(logs_df) > 0 and len(imgs_df) > 0):
+            meta_df = imgs_df.merge(logs_df, on="filepath", how="left")
+        else:
+            meta_df = imgs_df.copy()
+            # Ensure basic columns exist
+            if "filepath" not in meta_df.columns and not meta_df.empty:
+                meta_df["filepath"] = meta_df.get("filename", "unknown")
+    except Exception as e:
+        st.warning(f"Merge failed, using image data only: {e}")
+        meta_df = imgs_df.copy()
+        # Ensure basic columns exist even after error
+        if not meta_df.empty and "filepath" not in meta_df.columns:
+            meta_df["filepath"] = meta_df.get("filename", "unknown")

     if meta_df.empty:
         st.info("No images found in outputs/. Generate some images first.")
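
The merge follows the same defensive idea: keep every scanned image and attach log metadata only when both frames actually carry a filepath column. A small sketch with made-up frames (the values are illustrative, not dashboard data) showing both branches:

import pandas as pd

imgs_df = pd.DataFrame({"filepath": ["outputs/a.png", "outputs/b.png"]})
logs_df = pd.DataFrame({"filepath": ["outputs/a.png"], "prompt": ["a red fox"]})

# Normal branch: both frames carry "filepath", so a left merge keeps every
# image and attaches metadata where a log row exists.
meta_df = imgs_df.merge(logs_df, on="filepath", how="left")
print(meta_df)  # b.png keeps its row with a NaN prompt instead of being dropped

# Fallback branch: logs without "filepath" are ignored and images are kept as-is.
bad_logs = pd.DataFrame({"filename": ["a.png"]})
if not bad_logs.empty and "filepath" in bad_logs.columns:
    meta_df = imgs_df.merge(bad_logs, on="filepath", how="left")
else:
    meta_df = imgs_df.copy()
print(meta_df.columns.tolist())  # ['filepath'] only
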

@@ -1569,13 +1596,23 @@ with tab_export:
             if Path(p).exists():
                 try:
                     df = pd.read_csv(p)
+                    if not df.empty and "filepath" in df.columns:
+                        df["source_log"] = Path(p).name
+                        frames.append(df)
                 except Exception as e:
                     st.warning(f"Read fail {p}: {e}")
         if not frames:
             return pd.DataFrame(columns=["filepath"])
+        try:
+            combined = pd.concat(frames, ignore_index=True)
+            # Extra safety: ensure filepath column exists before deduplication
+            if "filepath" in combined.columns and len(combined) > 0:
+                return combined.drop_duplicates(subset=["filepath"])
+            else:
+                return combined
+        except Exception as e:
+            st.warning(f"Failed concatenating export logs: {e}")
+            return pd.DataFrame(columns=["filepath"])

     def scan_imgs():
         """Scan images for export"""

@@ -1591,7 +1628,23 @@ with tab_export:
     if imgs_df.empty:
         st.info("No images to export yet. Generate some images first.")
     else:
+        # Safe merge with error handling - extra robust for Spaces
+        try:
+            if (not imgs_df.empty and not logs_df.empty and
+                "filepath" in logs_df.columns and "filepath" in imgs_df.columns and
+                len(logs_df) > 0 and len(imgs_df) > 0):
+                meta_df = imgs_df.merge(logs_df, on="filepath", how="left")
+            else:
+                meta_df = imgs_df.copy()
+                # Ensure basic columns exist
+                if "filepath" not in meta_df.columns and not meta_df.empty:
+                    meta_df["filepath"] = meta_df.get("filename", "unknown")
+        except Exception as e:
+            st.warning(f"Export merge failed, using image data only: {e}")
+            meta_df = imgs_df.copy()
+            # Ensure basic columns exist even after error
+            if not meta_df.empty and "filepath" not in meta_df.columns:
+                meta_df["filepath"] = meta_df.get("filename", "unknown")

         # Display available images
         st.markdown("### π Available Images")
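
Both the gallery and export tabs now share the same read-then-merge flow: collect whatever logs parse cleanly, deduplicate on filepath, and left-merge onto the scanned images. A hedged end-to-end sketch of that flow; the helper name read_logs_safely, the temp-file names, and the sample rows are illustrative and not part of the repository:

import tempfile
from pathlib import Path
import pandas as pd

def read_logs_safely(paths):
    # Mirrors the guarded reader in the diff: skip unreadable logs and logs
    # without a "filepath" column, then deduplicate the rest.
    frames = []
    for p in paths:
        if Path(p).exists():
            try:
                df = pd.read_csv(p)
                if not df.empty and "filepath" in df.columns:
                    df["source_log"] = Path(p).name
                    frames.append(df)
            except Exception as exc:
                print(f"Failed reading {p}: {exc}")
    if not frames:
        return pd.DataFrame(columns=["filepath"])
    combined = pd.concat(frames, ignore_index=True)
    if "filepath" in combined.columns and len(combined) > 0:
        combined = combined.drop_duplicates(subset=["filepath"])
    return combined

with tempfile.TemporaryDirectory() as tmp:
    good = Path(tmp) / "generation_log.csv"   # illustrative file names
    bad = Path(tmp) / "broken_log.csv"
    good.write_text("filepath,prompt\noutputs/a.png,a red fox\n")
    bad.write_text("filename\na.png\n")       # no "filepath" column: skipped

    logs_df = read_logs_safely([good, bad])
    imgs_df = pd.DataFrame({"filepath": ["outputs/a.png", "outputs/b.png"]})

    # Same fallback as the dashboard: merge only when both sides have "filepath".
    if not logs_df.empty and "filepath" in logs_df.columns:
        meta_df = imgs_df.merge(logs_df, on="filepath", how="left")
    else:
        meta_df = imgs_df.copy()
    print(meta_df[["filepath", "prompt"]])    # b.png survives with a NaN prompt
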