Update app.py
app.py CHANGED

@@ -4,6 +4,8 @@ import pandas as pd
 import plotly.graph_objects as go
 from datetime import datetime
 import os
+import asyncio
+import aiohttp
 
 HF_TOKEN = os.getenv("HF_TOKEN")
 
@@ -387,10 +389,32 @@ def get_all_models(limit=3000):
             if model.get('id', '') not in existing_ids:
                 all_models.append(model)
                 existing_ids.add(model.get('id', ''))
+    else:
+        print(f"Failed to fetch Korea models: {korea_response.status_code}")
 
     print(f"Total unique models: {len(all_models)}")
     return all_models[:limit]
 
+# Functions for asynchronous API calls
+async def fetch_model_data(session, model_id):
+    url = f"https://huggingface.co/api/models/{model_id.strip('/')}"
+    try:
+        async with session.get(url, headers={'Accept': 'application/json'}) as response:
+            if response.status == 200:
+                return await response.json()
+            else:
+                print(f"Failed to fetch {model_id}: {response.status}")
+                return None
+    except Exception as e:
+        print(f"Error fetching {model_id}: {e}")
+        return None
+
+async def fetch_all_target_models(model_ids):
+    async with aiohttp.ClientSession() as session:
+        tasks = [fetch_model_data(session, model_id) for model_id in model_ids]
+        results = await asyncio.gather(*tasks)
+        return results
+
 def get_models_data(progress=gr.Progress()):
     def calculate_rank(model_id, all_global_models, korea_models):
         # Check the global rank
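
The two async helpers added above can be exercised on their own before they are wired into get_models_data. Below is a minimal sketch, not part of the commit, assuming aiohttp is installed and huggingface.co is reachable; the model IDs are placeholders chosen only for illustration:

    import asyncio
    import aiohttp

    # Placeholder model IDs, used only for this illustration
    sample_ids = ["openai-community/gpt2", "distilbert/distilbert-base-uncased"]

    async def fetch_model_data(session, model_id):
        # Same shape as the helper introduced in this commit
        url = f"https://huggingface.co/api/models/{model_id.strip('/')}"
        async with session.get(url, headers={'Accept': 'application/json'}) as response:
            return await response.json() if response.status == 200 else None

    async def fetch_all(model_ids):
        async with aiohttp.ClientSession() as session:
            return await asyncio.gather(*(fetch_model_data(session, m) for m in model_ids))

    if __name__ == "__main__":
        for model_id, data in zip(sample_ids, asyncio.run(fetch_all(sample_ids))):
            print(model_id, (data or {}).get('downloads', 'n/a'))

asyncio.gather preserves input order, so the results line up with the IDs passed in, which is what the zip in the next hunk relies on.
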
@@ -448,43 +472,32 @@ def get_models_data(progress=gr.Progress()):
     # Create a Figure for visualization
     fig = go.Figure()
 
-    #
+    # Fetch the target_models data with asynchronous API calls
+    target_model_ids = list(target_models.keys())
+    model_results = asyncio.run(fetch_all_target_models(target_model_ids))
+
     filtered_models = []
-    for model_id in
[old lines 454-474, the rest of the removed per-model loop, are not legible in this extract]
-            print(f"Model {model_id}: Rank={rank}, Is Korea={is_korea}")
-        else:
-            filtered_models.append({
-                'id': model_id,
-                'global_rank': 'Not in top 3000',
-                'downloads': 0,
-                'likes': 0,
-                'title': 'No Title',
-                'is_korea': False
-            })
-        except Exception as e:
-            print(f"Error processing {model_id}: {str(e)}")
-            continue
+    for model_id, model_data in zip(target_model_ids, model_results):
+        if model_data is not None:
+            rank, is_korea = calculate_rank(model_id, all_global_models, korea_models)
+            filtered_models.append({
+                'id': model_id,
+                'global_rank': rank,
+                'downloads': model_data.get('downloads', 0),
+                'likes': model_data.get('likes', 0),
+                'title': model_data.get('title', 'No Title'),
+                'is_korea': is_korea
+            })
+            print(f"Model {model_id}: Rank={rank}, Is Korea={is_korea}")
+        else:
+            filtered_models.append({
+                'id': model_id,
+                'global_rank': 'Not in top 3000',
+                'downloads': 0,
+                'likes': 0,
+                'title': 'No Title',
+                'is_korea': False
+            })
 
     # Sort by rank
     filtered_models.sort(key=lambda x: float('inf') if isinstance(x['global_rank'], str) else x['global_rank'])
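
asyncio.run() is safe here because get_models_data itself is a plain synchronous function; it would raise a RuntimeError if an event loop were already running in the same thread. One consequence of asyncio.gather is that every request is launched at once. If the target list ever grows large, concurrency can be capped with a semaphore. The variant below is not part of this change, only an illustration; it reuses the fetch_model_data helper from the hunk above, and the limit of 20 is an arbitrary example value:

    import asyncio
    import aiohttp

    async def fetch_all_target_models_bounded(model_ids, max_concurrency=20):
        # Variant of fetch_all_target_models that limits in-flight requests
        semaphore = asyncio.Semaphore(max_concurrency)

        async def bounded_fetch(session, model_id):
            async with semaphore:
                # fetch_model_data is the helper introduced earlier in this diff
                return await fetch_model_data(session, model_id)

        async with aiohttp.ClientSession() as session:
            return await asyncio.gather(*(bounded_fetch(session, m) for m in model_ids))
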
@@ -920,7 +933,7 @@ def get_spaces_data(sort_type="trending", progress=gr.Progress()):
             tickmode='array',
             ticktext=[str(i) for i in range(1, 501, 20)],  # label ranks 1 to 500 at intervals of 20
             tickvals=[i for i in range(1, 501, 20)],
-            range=[1, 500]
+            range=[1, 500]
         ),
         height=800,
         showlegend=False,
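
The tick settings in the hunk above use plotly's array tick mode: tickvals fixes the tick positions, ticktext supplies the labels, and range pins the axis to 1-500. A self-contained sketch of the same pattern, with made-up bar data standing in for the app's real ranking chart:

    import plotly.graph_objects as go

    # Dummy data; the real app plots Space/model rankings here
    fig = go.Figure(go.Bar(x=[120, 80, 40], y=[1, 2, 3], orientation='h'))
    fig.update_layout(
        yaxis=dict(
            tickmode='array',
            tickvals=[i for i in range(1, 501, 20)],    # positions 1, 21, 41, ..., 481
            ticktext=[str(i) for i in range(1, 501, 20)],
            range=[1, 500],
        ),
        height=800,
        showlegend=False,
    )
    fig.show()
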
@@ -1088,8 +1101,6 @@ def get_trending_spaces_without_token():
 if not HF_TOKEN:
     get_trending_spaces = get_trending_spaces_without_token
 
-
-
 def create_error_plot():
     fig = go.Figure()
     fig.add_annotation(
@@ -1107,7 +1118,6 @@ def create_error_plot():
     )
     return fig
 
-
 def create_space_info_html(spaces_data):
     if not spaces_data:
         return "<div style='padding: 20px;'><h2>Failed to load the data.</h2></div>"
@@ -1461,4 +1471,4 @@ demo.launch(
     server_port=7860,
     share=False,
     show_error=True
-)
+)