Update TrendAnalysis.py
TrendAnalysis.py  (CHANGED: +73 −99)
@@ -279,7 +279,8 @@ def perform_trend_analysis(df):
     return df, topic_labels


-def build_dashboard(df, titleNm, topic_year):
+def build_dashboard(df, titleNm, topic_year,existing_app=None):
+    global dash_app
     TitleName = titleNm + "_" + topic_year
     color_palette = px.colors.qualitative.Vivid
     unique_topics = sorted(df["topic"].unique())
@@ -291,7 +292,8 @@ def build_dashboard(df, titleNm, topic_year):
     # Calculate the number of papers in each cluster
     cluster_sizes = df.groupby("topic").size().reset_index(name="paper_count")
     df = df.merge(cluster_sizes, on="topic", how="left")
-
+    app = existing_app if existing_app else dash.Dash(__name__, external_stylesheets=[dbc.themes.DARKLY])
+
     # Improved marker scaling with a better range
     min_size = 50
     max_size = 140
@@ -338,8 +340,7 @@ def build_dashboard(df, titleNm, topic_year):

     df["theme"] = df.apply(classify_theme, axis=1)

-
-    app = dash.Dash(__name__, external_stylesheets=[dbc.themes.DARKLY])  # DARKLY for a sleek dark theme
+

     # Create a more visually appealing figure
     fig = go.Figure()
@@ -903,92 +904,11 @@ def build_dashboard(df, titleNm, topic_year):
     return app


-# Global variables to track Dash app state
-dash_thread = None
-dash_app = None
-DASH_PORT = 7860
-
-
-# Simplified shutdown function that doesn't rely on request or psutil connections
-def shutdown_dash_app():
-    global dash_thread, dash_app
-
-    if dash_app is not None:
-        try:
-            print("Shutting down previous Dash app...")
-
-            # If we have a Dash app with a server
-            if hasattr(dash_app, 'server'):
-                # Set a shutdown flag
-                dash_app._shutdown = True
-
-            # Force the thread to terminate
-            if dash_thread and dash_thread.is_alive():
-                import ctypes
-                ctypes.pythonapi.PyThreadState_SetAsyncExc(
-                    ctypes.c_long(dash_thread.ident),
-                    ctypes.py_object(SystemExit)
-                )
-                dash_thread.join(timeout=2)
-
-            # Try to find and kill the process using the port
-            try:
-                import psutil
-                import os
-                import signal
-
-                for proc in psutil.process_iter(['pid']):
-                    try:
-                        for conn in proc.connections(kind='inet'):
-                            if conn.laddr.port == DASH_PORT:
-                                print(f"Killing process {proc.pid} using port {DASH_PORT}")
-                                os.kill(proc.pid, signal.SIGTERM)
-                    except:
-                        pass
-            except:
-                print("Could not find process using port")
-
-            # Clear references
-            dash_app = None
-            print("Previous Dash app successfully shut down")
-            return True
-
-        except Exception as e:
-            print(f"Error shutting down Dash app: {e}")
-            # Even if there were errors, reset the state
-            dash_app = None
-            return True
-
-    return True  # No app to shut down
-
-
-# Updated function to run Dash with error handling
-def run_dash(df, titleNm, Topic_year):
-    global dash_app
-
-    try:
-        # Build the dashboard
-        dash_app = build_dashboard(df, titleNm, Topic_year)
-
-        # Run the server
-        dash_app.run_server(debug=False, port=DASH_PORT, use_reloader=False)
-    except Exception as e:
-        print(f"Error running Dash app: {e}")
-        dash_app = None
-
-
-# Update your endpoint - removed request parameter from shutdown_dash_app
 @router.post("/analyze-trends/")
 async def analyze_trends(request: Request, data_request: TrendAnalysisRequest):
     global dash_thread
     TitleName = data_request.topic
     Topic_year = data_request.year
-    # First, ensure any existing dashboard is properly shut down
-    shutdown_dash_app()
-
-    # Short delay to ensure port is freed
-    import time
-    time.sleep(1)

     # Fetch and process data
     df, current_page, total_pages, papers_count, total_papers = await fetch_papers_with_pagination(
@@ -1015,17 +935,36 @@ async def analyze_trends(request: Request, data_request: TrendAnalysisRequest):

     # Create cluster statistics
     cluster_sizes = df.groupby("topic").size().to_dict()
-
-    #
-
-
-
-
-
-
+
+    # Build the dashboard
+    from app import get_or_create_dash_app
+    dash_app = get_or_create_dash_app()
+
+    # Build the dashboard using existing dash_app
+    updated_dash_app = build_dashboard(df, TitleName, Topic_year if Topic_year else "", existing_app=dash_app)
+
+    # Update the global dash_app in the main app
+    from app import dash_app as main_dash_app
+    main_dash_app.layout = updated_dash_app.layout
+
+    # Add dashboard_path to the response
+    dashboard_path = f"/dash"
+
+    # Get base URL from request and build complete URL
+    base_url = str(request.base_url)
+    dashboard_url = f"{base_url}dash"
+
+    # Open browser in a new thread
+    def open_browser():
+        import webbrowser
+        webbrowser.open(dashboard_url,new=2)
+
+    # Start a thread to open the browser after a short delay
+    import threading
+    browser_thread = threading.Timer(1.5, open_browser)
     browser_thread.daemon = True
     browser_thread.start()
-
+
     return {
         "message": f"Trend analysis completed for papers (page {current_page + 1} of {total_pages})",
         "current_page": current_page,
@@ -1034,11 +973,46 @@ async def analyze_trends(request: Request, data_request: TrendAnalysisRequest):
         "total_papers": total_papers,
         "cluster_sizes": cluster_sizes,
         "cluster_titles": topic_labels,
-        "dashboard_url":
+        "dashboard_url": dashboard_url,
+        "redirect": True  # Add a flag to indicate redirect is needed
     }


-#
-
-
-
+# Additional function to add at the bottom of TrendAnalysis.py to ensure browser opening works
+# on direct dashboard access as well
+
+@router.get("/dashboard/{userId}/{topic}/{year}")
+@router.get("/dashboard/{userId}/{topic}")
+async def get_dashboard(request: Request, userId: str, topic: str, year: str = None):
+    # Fetch and process data
+    from pydantic import BaseModel
+    import webbrowser
+    import threading
+
+    class TempRequest(BaseModel):
+        userId: str
+        topic: str
+        year: str = None
+        page: int = 0
+
+    data_request = TempRequest(userId=userId, topic=topic, year=year)
+
+    # Get base URL from request and build complete URL
+    base_url = str(request.base_url)
+    dashboard_url = f"{base_url}dash"
+
+    # Open browser in a new thread
+    def open_browser():
+        webbrowser.open(dashboard_url,new=2)
+
+    # Start a thread to open the browser after a short delay
+    browser_thread = threading.Timer(1.5, open_browser)
+    browser_thread.daemon = True
+    browser_thread.start()
+
+    # Reuse the analyze_trends logic to create the dashboard
+    result = await analyze_trends(request, data_request)
+
+    # Redirect to the dash app
+    from fastapi.responses import RedirectResponse
+    return RedirectResponse(url="/dash")