Spaces:
Running
Running
| import gradio as gr | |
| import requests | |
| import pandas as pd | |
| import plotly.graph_objects as go | |
| from datetime import datetime | |
| import os | |
| HF_TOKEN = os.getenv("HF_TOKEN") | |
| target_models = { | |
| "openfree/flux-lora-korea-palace": "https://huggingface.co/openfree/flux-lora-korea-palace", | |
| "seawolf2357/hanbok": "https://huggingface.co/seawolf2357/hanbok", | |
| "seawolf2357/ntower": "https://huggingface.co/seawolf2357/ntower", | |
| "seawolf2357/flux-lora-military-artillery-k9": "https://huggingface.co/seawolf2357/flux-lora-military-artillery-k9", | |
| "openfree/claude-monet": "https://huggingface.co/openfree/claude-monet", | |
| "LGAI-EXAONE/EXAONE-3.5-32B-Instruct": "https://huggingface.co/LGAI-EXAONE/EXAONE-3.5-32B-Instruct", | |
| "LGAI-EXAONE/EXAONE-3.5-2.4B-Instruct": "https://huggingface.co/LGAI-EXAONE/EXAONE-3.5-2.4B-Instruct", | |
| "LGAI-EXAONE/EXAONE-3.5-7.8B-Instruct": "https://huggingface.co/LGAI-EXAONE/EXAONE-3.5-7.8B-Instruct", | |
| "ginipick/flux-lora-eric-cat": "https://huggingface.co/ginipick/flux-lora-eric-cat", | |
| "seawolf2357/flux-lora-car-rolls-royce": "https://huggingface.co/seawolf2357/flux-lora-car-rolls-royce", | |
| "moreh/Llama-3-Motif-102B-Instruct": "https://huggingface.co/moreh/Llama-3-Motif-102B-Instruct", | |
| "OnomaAIResearch/Illustrious-xl-early-release-v0": "https://huggingface.co/OnomaAIResearch/Illustrious-xl-early-release-v0", | |
| "upstage/solar-pro-preview-instruct": "https://huggingface.co/upstage/solar-pro-preview-instruct", | |
| "NCSOFT/VARCO-VISION-14B": "https://huggingface.co/NCSOFT/VARCO-VISION-14B", | |
| "NCSOFT/Llama-VARCO-8B-Instruct": "https://huggingface.co/NCSOFT/Llama-VARCO-8B-Instruct", | |
| "NCSOFT/VARCO-VISION-14B-HF": "https://huggingface.co/NCSOFT/VARCO-VISION-14B-HF", | |
| "KAERI-MLP/llama-3.1-Korean-AtomicGPT-Bllossom-8B": "https://huggingface.co/KAERI-MLP/llama-3.1-Korean-AtomicGPT-Bllossom-8B", | |
| "dnotitia/Llama-DNA-1.0-8B-Instruct": "https://huggingface.co/dnotitia/Llama-DNA-1.0-8B-Instruct", | |
| "Bllossom/llama-3.2-Korean-Bllossom-3B": "https://huggingface.co/Bllossom/llama-3.2-Korean-Bllossom-3B", | |
| "unidocs/llama-3.1-8b-komedic-instruct": "https://huggingface.co/unidocs/llama-3.1-8b-komedic-instruct", | |
| "unidocs/llama-3.2-3b-komedic-instruct": "https://huggingface.co/unidocs/llama-3.2-3b-komedic-instruct", | |
| "etri-lirs/eagle-3b-preview": "https://huggingface.co/etri-lirs/eagle-3b-preview", | |
| "kakaobrain/kogpt": "https://huggingface.co/kakaobrain/kogpt", | |
| "Saxo/Linkbricks-Horizon-AI-Korean-Gemma-2-sft-dpo-27B": "https://huggingface.co/Saxo/Linkbricks-Horizon-AI-Korean-Gemma-2-sft-dpo-27B", | |
| "AALF/gemma-2-27b-it-SimPO-37K": "https://huggingface.co/AALF/gemma-2-27b-it-SimPO-37K", | |
| "nbeerbower/mistral-nemo-wissenschaft-12B": "https://huggingface.co/nbeerbower/mistral-nemo-wissenschaft-12B", | |
| "Saxo/Linkbricks-Horizon-AI-Korean-Mistral-Nemo-sft-dpo-12B": "https://huggingface.co/Saxo/Linkbricks-Horizon-AI-Korean-Mistral-Nemo-sft-dpo-12B", | |
| "princeton-nlp/gemma-2-9b-it-SimPO": "https://huggingface.co/princeton-nlp/gemma-2-9b-it-SimPO", | |
| "migtissera/Tess-v2.5-Gemma-2-27B-alpha": "https://huggingface.co/migtissera/Tess-v2.5-Gemma-2-27B-alpha", | |
| "DeepMount00/Llama-3.1-8b-Ita": "https://huggingface.co/DeepMount00/Llama-3.1-8b-Ita", | |
| "cognitivecomputations/dolphin-2.9.3-mistral-nemo-12b": "https://huggingface.co/cognitivecomputations/dolphin-2.9.3-mistral-nemo-12b", | |
| "ai-human-lab/EEVE-Korean_Instruct-10.8B-expo": "https://huggingface.co/ai-human-lab/EEVE-Korean_Instruct-10.8B-expo", | |
| "VAGOsolutions/Llama-3.1-SauerkrautLM-8b-Instruct": "https://huggingface.co/VAGOsolutions/Llama-3.1-SauerkrautLM-8b-Instruct", | |
| "Saxo/Linkbricks-Horizon-AI-Korean-llama-3.1-sft-dpo-8B": "https://huggingface.co/Saxo/Linkbricks-Horizon-AI-Korean-llama-3.1-sft-dpo-8B", | |
| "AIDX-ktds/ktdsbaseLM-v0.12-based-on-openchat3.5": "https://huggingface.co/AIDX-ktds/ktdsbaseLM-v0.12-based-on-openchat3.5", | |
| "mlabonne/Daredevil-8B-abliterated": "https://huggingface.co/mlabonne/Daredevil-8B-abliterated", | |
| "ENERGY-DRINK-LOVE/eeve_dpo-v3": "https://huggingface.co/ENERGY-DRINK-LOVE/eeve_dpo-v3", | |
| "migtissera/Trinity-2-Codestral-22B": "https://huggingface.co/migtissera/Trinity-2-Codestral-22B", | |
| "Saxo/Linkbricks-Horizon-AI-Korean-llama3.1-sft-rlhf-dpo-8B": "https://huggingface.co/Saxo/Linkbricks-Horizon-AI-Korean-llama3.1-sft-rlhf-dpo-8B", | |
| "mlabonne/Daredevil-8B-abliterated-dpomix": "https://huggingface.co/mlabonne/Daredevil-8B-abliterated-dpomix", | |
| "yanolja/EEVE-Korean-Instruct-10.8B-v1.0": "https://huggingface.co/yanolja/EEVE-Korean-Instruct-10.8B-v1.0", | |
| "vicgalle/Configurable-Llama-3.1-8B-Instruct": "https://huggingface.co/vicgalle/Configurable-Llama-3.1-8B-Instruct", | |
| "T3Q-LLM/T3Q-LLM1-sft1.0-dpo1.0": "https://huggingface.co/T3Q-LLM/T3Q-LLM1-sft1.0-dpo1.0", | |
| "Eurdem/Defne-llama3.1-8B": "https://huggingface.co/Eurdem/Defne-llama3.1-8B", | |
| "BAAI/Infinity-Instruct-7M-Gen-Llama3_1-8B": "https://huggingface.co/BAAI/Infinity-Instruct-7M-Gen-Llama3_1-8B", | |
| "BAAI/Infinity-Instruct-3M-0625-Llama3-8B": "https://huggingface.co/BAAI/Infinity-Instruct-3M-0625-Llama3-8B", | |
| "T3Q-LLM/T3Q-LLM-sft1.0-dpo1.0": "https://huggingface.co/T3Q-LLM/T3Q-LLM-sft1.0-dpo1.0", | |
| "BAAI/Infinity-Instruct-7M-0729-Llama3_1-8B": "https://huggingface.co/BAAI/Infinity-Instruct-7M-0729-Llama3_1-8B", | |
| "mightbe/EEVE-10.8B-Multiturn": "https://huggingface.co/mightbe/EEVE-10.8B-Multiturn", | |
| "hyemijo/omed-llama3.1-8b": "https://huggingface.co/hyemijo/omed-llama3.1-8b", | |
| "yanolja/Bookworm-10.7B-v0.4-DPO": "https://huggingface.co/yanolja/Bookworm-10.7B-v0.4-DPO", | |
| "algograp-Inc/algograpV4": "https://huggingface.co/algograp-Inc/algograpV4", | |
| "lightblue/suzume-llama-3-8B-multilingual-orpo-borda-top75": "https://huggingface.co/lightblue/suzume-llama-3-8B-multilingual-orpo-borda-top75", | |
| "chihoonlee10/T3Q-LLM-MG-DPO-v1.0": "https://huggingface.co/chihoonlee10/T3Q-LLM-MG-DPO-v1.0", | |
| "vicgalle/Configurable-Hermes-2-Pro-Llama-3-8B": "https://huggingface.co/vicgalle/Configurable-Hermes-2-Pro-Llama-3-8B", | |
| "RLHFlow/LLaMA3-iterative-DPO-final": "https://huggingface.co/RLHFlow/LLaMA3-iterative-DPO-final", | |
| "SEOKDONG/llama3.1_korean_v0.1_sft_by_aidx": "https://huggingface.co/SEOKDONG/llama3.1_korean_v0.1_sft_by_aidx", | |
| "spow12/Ko-Qwen2-7B-Instruct": "https://huggingface.co/spow12/Ko-Qwen2-7B-Instruct", | |
| "BAAI/Infinity-Instruct-3M-0625-Qwen2-7B": "https://huggingface.co/BAAI/Infinity-Instruct-3M-0625-Qwen2-7B", | |
| "lightblue/suzume-llama-3-8B-multilingual-orpo-borda-half": "https://huggingface.co/lightblue/suzume-llama-3-8B-multilingual-orpo-borda-half", | |
| "T3Q-LLM/T3Q-LLM1-CV-v2.0": "https://huggingface.co/T3Q-LLM/T3Q-LLM1-CV-v2.0", | |
| "migtissera/Trinity-2-Codestral-22B-v0.2": "https://huggingface.co/migtissera/Trinity-2-Codestral-22B-v0.2", | |
| "sinjy1203/EEVE-Korean-Instruct-10.8B-v1.0-Grade-Retrieval": "https://huggingface.co/sinjy1203/EEVE-Korean-Instruct-10.8B-v1.0-Grade-Retrieval", | |
| "MaziyarPanahi/Llama-3-8B-Instruct-v0.10": "https://huggingface.co/MaziyarPanahi/Llama-3-8B-Instruct-v0.10", | |
| "MaziyarPanahi/Llama-3-8B-Instruct-v0.9": "https://huggingface.co/MaziyarPanahi/Llama-3-8B-Instruct-v0.9", | |
| "zhengr/MixTAO-7Bx2-MoE-v8.1": "https://huggingface.co/zhengr/MixTAO-7Bx2-MoE-v8.1", | |
| "TIGER-Lab/MAmmoTH2-8B-Plus": "https://huggingface.co/TIGER-Lab/MAmmoTH2-8B-Plus", | |
| "OpenBuddy/openbuddy-qwen1.5-14b-v21.1-32k": "https://huggingface.co/OpenBuddy/openbuddy-qwen1.5-14b-v21.1-32k", | |
| "haoranxu/Llama-3-Instruct-8B-CPO-SimPO": "https://huggingface.co/haoranxu/Llama-3-Instruct-8B-CPO-SimPO", | |
| "Weyaxi/Einstein-v7-Qwen2-7B": "https://huggingface.co/Weyaxi/Einstein-v7-Qwen2-7B", | |
| "DKYoon/kosolar-hermes-test": "https://huggingface.co/DKYoon/kosolar-hermes-test", | |
| "vilm/Quyen-Pro-v0.1": "https://huggingface.co/vilm/Quyen-Pro-v0.1", | |
| "chihoonlee10/T3Q-LLM-MG-v1.0": "https://huggingface.co/chihoonlee10/T3Q-LLM-MG-v1.0", | |
| "lightblue/suzume-llama-3-8B-multilingual-orpo-borda-top25": "https://huggingface.co/lightblue/suzume-llama-3-8B-multilingual-orpo-borda-top25", | |
| "ai-human-lab/EEVE-Korean-10.8B-RAFT": "https://huggingface.co/ai-human-lab/EEVE-Korean-10.8B-RAFT", | |
| "princeton-nlp/Llama-3-Base-8B-SFT-RDPO": "https://huggingface.co/princeton-nlp/Llama-3-Base-8B-SFT-RDPO", | |
| "MaziyarPanahi/Llama-3-8B-Instruct-v0.8": "https://huggingface.co/MaziyarPanahi/Llama-3-8B-Instruct-v0.8", | |
| "chihoonlee10/T3Q-ko-solar-dpo-v7.0": "https://huggingface.co/chihoonlee10/T3Q-ko-solar-dpo-v7.0", | |
| "jondurbin/bagel-8b-v1.0": "https://huggingface.co/jondurbin/bagel-8b-v1.0", | |
| "DeepMount00/Llama-3-8b-Ita": "https://huggingface.co/DeepMount00/Llama-3-8b-Ita", | |
| "VAGOsolutions/Llama-3-SauerkrautLM-8b-Instruct": "https://huggingface.co/VAGOsolutions/Llama-3-SauerkrautLM-8b-Instruct", | |
| "princeton-nlp/Llama-3-Instruct-8B-ORPO-v0.2": "https://huggingface.co/princeton-nlp/Llama-3-Instruct-8B-ORPO-v0.2", | |
| "AIDX-ktds/ktdsbaseLM-v0.11-based-on-openchat3.5": "https://huggingface.co/AIDX-ktds/ktdsbaseLM-v0.11-based-on-openchat3.5", | |
| "princeton-nlp/Llama-3-Base-8B-SFT-KTO": "https://huggingface.co/princeton-nlp/Llama-3-Base-8B-SFT-KTO", | |
| "maywell/Mini_Synatra_SFT": "https://huggingface.co/maywell/Mini_Synatra_SFT", | |
| "princeton-nlp/Llama-3-Base-8B-SFT-ORPO": "https://huggingface.co/princeton-nlp/Llama-3-Base-8B-SFT-ORPO", | |
| "princeton-nlp/Llama-3-Instruct-8B-CPO-v0.2": "https://huggingface.co/princeton-nlp/Llama-3-Instruct-8B-CPO-v0.2", | |
| "spow12/Qwen2-7B-ko-Instruct-orpo-ver_2.0_wo_chat": "https://huggingface.co/spow12/Qwen2-7B-ko-Instruct-orpo-ver_2.0_wo_chat", | |
| "princeton-nlp/Llama-3-Base-8B-SFT-DPO": "https://huggingface.co/princeton-nlp/Llama-3-Base-8B-SFT-DPO", | |
| "princeton-nlp/Llama-3-Instruct-8B-ORPO": "https://huggingface.co/princeton-nlp/Llama-3-Instruct-8B-ORPO", | |
| "lcw99/llama-3-10b-it-kor-extented-chang": "https://huggingface.co/lcw99/llama-3-10b-it-kor-extented-chang", | |
| "migtissera/Llama-3-8B-Synthia-v3.5": "https://huggingface.co/migtissera/Llama-3-8B-Synthia-v3.5", | |
| "megastudyedu/M-SOLAR-10.7B-v1.4-dpo": "https://huggingface.co/megastudyedu/M-SOLAR-10.7B-v1.4-dpo", | |
| "T3Q-LLM/T3Q-LLM-solar10.8-sft-v1.0": "https://huggingface.co/T3Q-LLM/T3Q-LLM-solar10.8-sft-v1.0", | |
| "maywell/Synatra-10.7B-v0.4": "https://huggingface.co/maywell/Synatra-10.7B-v0.4", | |
| "nlpai-lab/KULLM3": "https://huggingface.co/nlpai-lab/KULLM3", | |
| "abacusai/Llama-3-Smaug-8B": "https://huggingface.co/abacusai/Llama-3-Smaug-8B", | |
| "gwonny/nox-solar-10.7b-v4-kolon-ITD-5-v2.1": "https://huggingface.co/gwonny/nox-solar-10.7b-v4-kolon-ITD-5-v2.1", | |
| "BAAI/Infinity-Instruct-3M-0625-Mistral-7B": "https://huggingface.co/BAAI/Infinity-Instruct-3M-0625-Mistral-7B", | |
| "openchat/openchat_3.5": "https://huggingface.co/openchat/openchat_3.5", | |
| "T3Q-LLM/T3Q-LLM1-v2.0": "https://huggingface.co/T3Q-LLM/T3Q-LLM1-v2.0", | |
| "T3Q-LLM/T3Q-LLM1-CV-v1.0": "https://huggingface.co/T3Q-LLM/T3Q-LLM1-CV-v1.0", | |
| "ONS-AI-RESEARCH/ONS-SOLAR-10.7B-v1.1": "https://huggingface.co/ONS-AI-RESEARCH/ONS-SOLAR-10.7B-v1.1", | |
| "macadeliccc/Samantha-Qwen-2-7B": "https://huggingface.co/macadeliccc/Samantha-Qwen-2-7B", | |
| "openchat/openchat-3.5-0106": "https://huggingface.co/openchat/openchat-3.5-0106", | |
| "NousResearch/Nous-Hermes-2-SOLAR-10.7B": "https://huggingface.co/NousResearch/Nous-Hermes-2-SOLAR-10.7B", | |
| "UCLA-AGI/Llama-3-Instruct-8B-SPPO-Iter1": "https://huggingface.co/UCLA-AGI/Llama-3-Instruct-8B-SPPO-Iter1", | |
| "MTSAIR/multi_verse_model": "https://huggingface.co/MTSAIR/multi_verse_model", | |
| "gwonny/nox-solar-10.7b-v4-kolon-ITD-5-v2.0": "https://huggingface.co/gwonny/nox-solar-10.7b-v4-kolon-ITD-5-v2.0", | |
| "VIRNECT/llama-3-Korean-8B": "https://huggingface.co/VIRNECT/llama-3-Korean-8B", | |
| "ENERGY-DRINK-LOVE/SOLAR_merge_DPOv3": "https://huggingface.co/ENERGY-DRINK-LOVE/SOLAR_merge_DPOv3", | |
| "SeaLLMs/SeaLLMs-v3-7B-Chat": "https://huggingface.co/SeaLLMs/SeaLLMs-v3-7B-Chat", | |
| "VIRNECT/llama-3-Korean-8B-V2": "https://huggingface.co/VIRNECT/llama-3-Korean-8B-V2", | |
| "MLP-KTLim/llama-3-Korean-Bllossom-8B": "https://huggingface.co/MLP-KTLim/llama-3-Korean-Bllossom-8B", | |
| "Magpie-Align/Llama-3-8B-Magpie-Align-v0.3": "https://huggingface.co/Magpie-Align/Llama-3-8B-Magpie-Align-v0.3", | |
| "cognitivecomputations/Llama-3-8B-Instruct-abliterated-v2": "https://huggingface.co/cognitivecomputations/Llama-3-8B-Instruct-abliterated-v2", | |
| "SkyOrbis/SKY-Ko-Llama3-8B-lora": "https://huggingface.co/SkyOrbis/SKY-Ko-Llama3-8B-lora", | |
| "4yo1/llama3-eng-ko-8b-sl5": "https://huggingface.co/4yo1/llama3-eng-ko-8b-sl5", | |
| "kimwooglae/WebSquareAI-Instruct-llama-3-8B-v0.5.39": "https://huggingface.co/kimwooglae/WebSquareAI-Instruct-llama-3-8B-v0.5.39", | |
| "ONS-AI-RESEARCH/ONS-SOLAR-10.7B-v1.2": "https://huggingface.co/ONS-AI-RESEARCH/ONS-SOLAR-10.7B-v1.2", | |
| "lcw99/llama-3-10b-it-kor-extented-chang-pro8": "https://huggingface.co/lcw99/llama-3-10b-it-kor-extented-chang-pro8", | |
| "BAAI/Infinity-Instruct-3M-0625-Yi-1.5-9B": "https://huggingface.co/BAAI/Infinity-Instruct-3M-0625-Yi-1.5-9B", | |
| "migtissera/Tess-2.0-Llama-3-8B": "https://huggingface.co/migtissera/Tess-2.0-Llama-3-8B", | |
| "BAAI/Infinity-Instruct-3M-0613-Mistral-7B": "https://huggingface.co/BAAI/Infinity-Instruct-3M-0613-Mistral-7B", | |
| "yeonwoo780/cydinfo-llama3-8b-lora-v01": "https://huggingface.co/yeonwoo780/cydinfo-llama3-8b-lora-v01", | |
| "vicgalle/ConfigurableSOLAR-10.7B": "https://huggingface.co/vicgalle/ConfigurableSOLAR-10.7B", | |
| "chihoonlee10/T3Q-ko-solar-jo-v1.0": "https://huggingface.co/chihoonlee10/T3Q-ko-solar-jo-v1.0", | |
| "Kukedlc/NeuralLLaMa-3-8b-ORPO-v0.4": "https://huggingface.co/Kukedlc/NeuralLLaMa-3-8b-ORPO-v0.4", | |
| "Edentns/DataVortexS-10.7B-dpo-v1.0": "https://huggingface.co/Edentns/DataVortexS-10.7B-dpo-v1.0", | |
| "SJ-Donald/SJ-SOLAR-10.7b-DPO": "https://huggingface.co/SJ-Donald/SJ-SOLAR-10.7b-DPO", | |
| "lemon-mint/gemma-ko-7b-it-v0.40": "https://huggingface.co/lemon-mint/gemma-ko-7b-it-v0.40", | |
| "GyuHyeonWkdWkdMan/naps-llama-3.1-8b-instruct-v0.3": "https://huggingface.co/GyuHyeonWkdWkdMan/naps-llama-3.1-8b-instruct-v0.3", | |
| "hyeogi/SOLAR-10.7B-v1.5": "https://huggingface.co/hyeogi/SOLAR-10.7B-v1.5", | |
| "etri-xainlp/llama3-8b-dpo_v1": "https://huggingface.co/etri-xainlp/llama3-8b-dpo_v1", | |
| "LDCC/LDCC-SOLAR-10.7B": "https://huggingface.co/LDCC/LDCC-SOLAR-10.7B", | |
| "chlee10/T3Q-Llama3-8B-Inst-sft1.0": "https://huggingface.co/chlee10/T3Q-Llama3-8B-Inst-sft1.0", | |
| "lemon-mint/gemma-ko-7b-it-v0.41": "https://huggingface.co/lemon-mint/gemma-ko-7b-it-v0.41", | |
| "chlee10/T3Q-Llama3-8B-sft1.0-dpo1.0": "https://huggingface.co/chlee10/T3Q-Llama3-8B-sft1.0-dpo1.0", | |
| "maywell/Synatra-7B-Instruct-v0.3-pre": "https://huggingface.co/maywell/Synatra-7B-Instruct-v0.3-pre", | |
| "UCLA-AGI/Llama-3-Instruct-8B-SPPO-Iter2": "https://huggingface.co/UCLA-AGI/Llama-3-Instruct-8B-SPPO-Iter2", | |
| "hwkwon/S-SOLAR-10.7B-v1.4": "https://huggingface.co/hwkwon/S-SOLAR-10.7B-v1.4", | |
| "12thD/ko-Llama-3-8B-sft-v0.3": "https://huggingface.co/12thD/ko-Llama-3-8B-sft-v0.3", | |
| "hkss/hk-SOLAR-10.7B-v1.4": "https://huggingface.co/hkss/hk-SOLAR-10.7B-v1.4", | |
| "lookuss/test-llilu": "https://huggingface.co/lookuss/test-llilu", | |
| "chihoonlee10/T3Q-ko-solar-dpo-v3.0": "https://huggingface.co/chihoonlee10/T3Q-ko-solar-dpo-v3.0", | |
| "chihoonlee10/T3Q-ko-solar-dpo-v1.0": "https://huggingface.co/chihoonlee10/T3Q-ko-solar-dpo-v1.0", | |
| "lcw99/llama-3-10b-wiki-240709-f": "https://huggingface.co/lcw99/llama-3-10b-wiki-240709-f", | |
| "Edentns/DataVortexS-10.7B-v0.4": "https://huggingface.co/Edentns/DataVortexS-10.7B-v0.4", | |
| "princeton-nlp/Llama-3-Instruct-8B-KTO": "https://huggingface.co/princeton-nlp/Llama-3-Instruct-8B-KTO", | |
| "spow12/kosolar_4.1_sft": "https://huggingface.co/spow12/kosolar_4.1_sft", | |
| "natong19/Qwen2-7B-Instruct-abliterated": "https://huggingface.co/natong19/Qwen2-7B-Instruct-abliterated", | |
| "megastudyedu/ME-dpo-7B-v1.1": "https://huggingface.co/megastudyedu/ME-dpo-7B-v1.1", | |
| "01-ai/Yi-1.5-9B-Chat-16K": "https://huggingface.co/01-ai/Yi-1.5-9B-Chat-16K", | |
| "Edentns/DataVortexS-10.7B-dpo-v0.1": "https://huggingface.co/Edentns/DataVortexS-10.7B-dpo-v0.1", | |
| "Alphacode-AI/AlphaMist7B-slr-v4-slow": "https://huggingface.co/Alphacode-AI/AlphaMist7B-slr-v4-slow", | |
| "chihoonlee10/T3Q-ko-solar-sft-dpo-v1.0": "https://huggingface.co/chihoonlee10/T3Q-ko-solar-sft-dpo-v1.0", | |
| "hwkwon/S-SOLAR-10.7B-v1.1": "https://huggingface.co/hwkwon/S-SOLAR-10.7B-v1.1", | |
| "DopeorNope/Dear_My_best_Friends-13B": "https://huggingface.co/DopeorNope/Dear_My_best_Friends-13B", | |
| "GyuHyeonWkdWkdMan/NAPS-llama-3.1-8b-instruct-v0.3.2": "https://huggingface.co/GyuHyeonWkdWkdMan/NAPS-llama-3.1-8b-instruct-v0.3.2", | |
| "PathFinderKR/Waktaverse-Llama-3-KO-8B-Instruct": "https://huggingface.co/PathFinderKR/Waktaverse-Llama-3-KO-8B-Instruct", | |
| "vicgalle/ConfigurableHermes-7B": "https://huggingface.co/vicgalle/ConfigurableHermes-7B", | |
| "maywell/PiVoT-10.7B-Mistral-v0.2": "https://huggingface.co/maywell/PiVoT-10.7B-Mistral-v0.2", | |
| "failspy/Meta-Llama-3-8B-Instruct-abliterated-v3": "https://huggingface.co/failspy/Meta-Llama-3-8B-Instruct-abliterated-v3", | |
| "lemon-mint/gemma-ko-7b-instruct-v0.50": "https://huggingface.co/lemon-mint/gemma-ko-7b-instruct-v0.50", | |
| "ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_Open-Hermes_LDCC-SOLAR-10.7B_SFT": "https://huggingface.co/ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_Open-Hermes_LDCC-SOLAR-10.7B_SFT", | |
| "maywell/PiVoT-0.1-early": "https://huggingface.co/maywell/PiVoT-0.1-early", | |
| "hwkwon/S-SOLAR-10.7B-v1.3": "https://huggingface.co/hwkwon/S-SOLAR-10.7B-v1.3", | |
| "werty1248/Llama-3-Ko-8B-Instruct-AOG": "https://huggingface.co/werty1248/Llama-3-Ko-8B-Instruct-AOG", | |
| "Alphacode-AI/AlphaMist7B-slr-v2": "https://huggingface.co/Alphacode-AI/AlphaMist7B-slr-v2", | |
| "maywell/koOpenChat-sft": "https://huggingface.co/maywell/koOpenChat-sft", | |
| "lemon-mint/gemma-7b-openhermes-v0.80": "https://huggingface.co/lemon-mint/gemma-7b-openhermes-v0.80", | |
| "VIRNECT/llama-3-Korean-8B-r-v1": "https://huggingface.co/VIRNECT/llama-3-Korean-8B-r-v1", | |
| "Alphacode-AI/AlphaMist7B-slr-v1": "https://huggingface.co/Alphacode-AI/AlphaMist7B-slr-v1", | |
| "Loyola/Mistral-7b-ITmodel": "https://huggingface.co/Loyola/Mistral-7b-ITmodel", | |
| "VIRNECT/llama-3-Korean-8B-r-v2": "https://huggingface.co/VIRNECT/llama-3-Korean-8B-r-v2", | |
| "NLPark/AnFeng_v3.1-Avocet": "https://huggingface.co/NLPark/AnFeng_v3.1-Avocet", | |
| "maywell/Synatra_TbST11B_EP01": "https://huggingface.co/maywell/Synatra_TbST11B_EP01", | |
| "GritLM/GritLM-7B-KTO": "https://huggingface.co/GritLM/GritLM-7B-KTO", | |
| "01-ai/Yi-34B-Chat": "https://huggingface.co/01-ai/Yi-34B-Chat", | |
| "ValiantLabs/Llama3.1-8B-ShiningValiant2": "https://huggingface.co/ValiantLabs/Llama3.1-8B-ShiningValiant2", | |
| "princeton-nlp/Llama-3-Base-8B-SFT-CPO": "https://huggingface.co/princeton-nlp/Llama-3-Base-8B-SFT-CPO", | |
| "hyokwan/hkcode_llama3_8b": "https://huggingface.co/hyokwan/hkcode_llama3_8b", | |
| "UCLA-AGI/Llama-3-Instruct-8B-SPPO-Iter3": "https://huggingface.co/UCLA-AGI/Llama-3-Instruct-8B-SPPO-Iter3", | |
| "yuntaeyang/SOLAR-10.7B-Instructlora_sftt-v1.0": "https://huggingface.co/yuntaeyang/SOLAR-10.7B-Instructlora_sftt-v1.0", | |
| "juungwon/Llama-3-cs-LoRA": "https://huggingface.co/juungwon/Llama-3-cs-LoRA", | |
| "gangyeolkim/llama-3-chat": "https://huggingface.co/gangyeolkim/llama-3-chat", | |
| "mncai/llama2-13b-dpo-v3": "https://huggingface.co/mncai/llama2-13b-dpo-v3", | |
| "maywell/Synatra-Zephyr-7B-v0.01": "https://huggingface.co/maywell/Synatra-Zephyr-7B-v0.01", | |
| "ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_deup_LDCC-SOLAR-10.7B_SFT": "https://huggingface.co/ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_deup_LDCC-SOLAR-10.7B_SFT", | |
| "juungwon/Llama-3-constructionsafety-LoRA": "https://huggingface.co/juungwon/Llama-3-constructionsafety-LoRA", | |
| "princeton-nlp/Mistral-7B-Base-SFT-SimPO": "https://huggingface.co/princeton-nlp/Mistral-7B-Base-SFT-SimPO", | |
| "moondriller/solar10B-eugeneparkthebestv2": "https://huggingface.co/moondriller/solar10B-eugeneparkthebestv2", | |
| "chlee10/T3Q-LLM3-Llama3-sft1.0-dpo1.0": "https://huggingface.co/chlee10/T3Q-LLM3-Llama3-sft1.0-dpo1.0", | |
| "Edentns/DataVortexS-10.7B-dpo-v1.7": "https://huggingface.co/Edentns/DataVortexS-10.7B-dpo-v1.7", | |
| "gamzadole/llama3_instruct_tuning_without_pretraing": "https://huggingface.co/gamzadole/llama3_instruct_tuning_without_pretraing", | |
| "saltlux/Ko-Llama3-Luxia-8B": "https://huggingface.co/saltlux/Ko-Llama3-Luxia-8B", | |
| "kimdeokgi/ko-pt-model-test1": "https://huggingface.co/kimdeokgi/ko-pt-model-test1", | |
| "maywell/Synatra-11B-Testbench-2": "https://huggingface.co/maywell/Synatra-11B-Testbench-2", | |
| "Danielbrdz/Barcenas-14b-Phi-3-medium-ORPO": "https://huggingface.co/Danielbrdz/Barcenas-14b-Phi-3-medium-ORPO", | |
| "vicgalle/Configurable-Mistral-7B": "https://huggingface.co/vicgalle/Configurable-Mistral-7B", | |
| "ENERGY-DRINK-LOVE/leaderboard_inst_v1.5_LDCC-SOLAR-10.7B_SFT": "https://huggingface.co/ENERGY-DRINK-LOVE/leaderboard_inst_v1.5_LDCC-SOLAR-10.7B_SFT", | |
| "beomi/Llama-3-Open-Ko-8B-Instruct-preview": "https://huggingface.co/beomi/Llama-3-Open-Ko-8B-Instruct-preview", | |
| "Edentns/DataVortexS-10.7B-dpo-v1.3": "https://huggingface.co/Edentns/DataVortexS-10.7B-dpo-v1.3", | |
| "spow12/Llama3_ko_4.2_sft": "https://huggingface.co/spow12/Llama3_ko_4.2_sft", | |
| "maywell/Llama-3-Ko-8B-Instruct": "https://huggingface.co/maywell/Llama-3-Ko-8B-Instruct", | |
| "T3Q-LLM/T3Q-LLM3-NC-v1.0": "https://huggingface.co/T3Q-LLM/T3Q-LLM3-NC-v1.0", | |
| "ehartford/dolphin-2.2.1-mistral-7b": "https://huggingface.co/ehartford/dolphin-2.2.1-mistral-7b", | |
| "hwkwon/S-SOLAR-10.7B-SFT-v1.3": "https://huggingface.co/hwkwon/S-SOLAR-10.7B-SFT-v1.3", | |
| "sel303/llama3-instruct-diverce-v2.0": "https://huggingface.co/sel303/llama3-instruct-diverce-v2.0", | |
| "4yo1/llama3-eng-ko-8b-sl3": "https://huggingface.co/4yo1/llama3-eng-ko-8b-sl3", | |
| "hkss/hk-SOLAR-10.7B-v1.1": "https://huggingface.co/hkss/hk-SOLAR-10.7B-v1.1", | |
| "Open-Orca/Mistral-7B-OpenOrca": "https://huggingface.co/Open-Orca/Mistral-7B-OpenOrca", | |
| "hyokwan/familidata": "https://huggingface.co/hyokwan/familidata", | |
| "uukuguy/zephyr-7b-alpha-dare-0.85": "https://huggingface.co/uukuguy/zephyr-7b-alpha-dare-0.85", | |
| "gwonny/nox-solar-10.7b-v4-kolon-all-5": "https://huggingface.co/gwonny/nox-solar-10.7b-v4-kolon-all-5", | |
| "shleeeee/mistral-ko-tech-science-v1": "https://huggingface.co/shleeeee/mistral-ko-tech-science-v1", | |
| "Deepnoid/deep-solar-eeve-KorSTS": "https://huggingface.co/Deepnoid/deep-solar-eeve-KorSTS", | |
| "AIdenU/Mistral-7B-v0.2-ko-Y24_v1.0": "https://huggingface.co/AIdenU/Mistral-7B-v0.2-ko-Y24_v1.0", | |
| "tlphams/gollm-tendency-45": "https://huggingface.co/tlphams/gollm-tendency-45", | |
| "realPCH/ko_solra_merge": "https://huggingface.co/realPCH/ko_solra_merge", | |
| "Cartinoe5930/original-KoRAE-13b": "https://huggingface.co/Cartinoe5930/original-KoRAE-13b", | |
| "GAI-LLM/Yi-Ko-6B-dpo-v5": "https://huggingface.co/GAI-LLM/Yi-Ko-6B-dpo-v5", | |
| "Minirecord/Mini_DPO_test02": "https://huggingface.co/Minirecord/Mini_DPO_test02", | |
| "AIJUUD/juud-Mistral-7B-dpo": "https://huggingface.co/AIJUUD/juud-Mistral-7B-dpo", | |
| "gwonny/nox-solar-10.7b-v4-kolon-all-10": "https://huggingface.co/gwonny/nox-solar-10.7b-v4-kolon-all-10", | |
| "jieunhan/TEST_MODEL": "https://huggingface.co/jieunhan/TEST_MODEL", | |
| "etri-xainlp/kor-llama2-13b-dpo": "https://huggingface.co/etri-xainlp/kor-llama2-13b-dpo", | |
| "ifuseok/yi-ko-playtus-instruct-v0.2": "https://huggingface.co/ifuseok/yi-ko-playtus-instruct-v0.2", | |
| "Cartinoe5930/original-KoRAE-13b-3ep": "https://huggingface.co/Cartinoe5930/original-KoRAE-13b-3ep", | |
| "Trofish/KULLM-RLHF": "https://huggingface.co/Trofish/KULLM-RLHF", | |
| "wkshin89/Yi-Ko-6B-Instruct-v1.0": "https://huggingface.co/wkshin89/Yi-Ko-6B-Instruct-v1.0", | |
| "momo/polyglot-ko-12.8b-Chat-QLoRA-Merge": "https://huggingface.co/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge", | |
| "PracticeLLM/Custom-KoLLM-13B-v5": "https://huggingface.co/PracticeLLM/Custom-KoLLM-13B-v5", | |
| "BAAI/Infinity-Instruct-3M-0625-Yi-1.5-9B": "https://huggingface.co/BAAI/Infinity-Instruct-3M-0625-Yi-1.5-9B", | |
| "MRAIRR/minillama3_8b_all": "https://huggingface.co/MRAIRR/minillama3_8b_all", | |
| "failspy/Phi-3-medium-4k-instruct-abliterated-v3": "https://huggingface.co/failspy/Phi-3-medium-4k-instruct-abliterated-v3", | |
| "DILAB-HYU/koquality-polyglot-12.8b": "https://huggingface.co/DILAB-HYU/koquality-polyglot-12.8b", | |
| "kyujinpy/Korean-OpenOrca-v3": "https://huggingface.co/kyujinpy/Korean-OpenOrca-v3", | |
| "4yo1/llama3-eng-ko-8b": "https://huggingface.co/4yo1/llama3-eng-ko-8b", | |
| "4yo1/llama3-eng-ko-8": "https://huggingface.co/4yo1/llama3-eng-ko-8", | |
| "4yo1/llama3-eng-ko-8-llama": "https://huggingface.co/4yo1/llama3-eng-ko-8-llama", | |
| "PracticeLLM/Custom-KoLLM-13B-v2": "https://huggingface.co/PracticeLLM/Custom-KoLLM-13B-v2", | |
| "kyujinpy/KOR-Orca-Platypus-13B-v2": "https://huggingface.co/kyujinpy/KOR-Orca-Platypus-13B-v2", | |
| "ghost-x/ghost-7b-alpha": "https://huggingface.co/ghost-x/ghost-7b-alpha", | |
| "HumanF-MarkrAI/pub-llama-13B-v6": "https://huggingface.co/HumanF-MarkrAI/pub-llama-13B-v6", | |
| "nlpai-lab/kullm-polyglot-5.8b-v2": "https://huggingface.co/nlpai-lab/kullm-polyglot-5.8b-v2", | |
| "maywell/Synatra-42dot-1.3B": "https://huggingface.co/maywell/Synatra-42dot-1.3B", | |
| "yhkim9362/gemma-en-ko-7b-v0.1": "https://huggingface.co/yhkim9362/gemma-en-ko-7b-v0.1", | |
| "yhkim9362/gemma-en-ko-7b-v0.2": "https://huggingface.co/yhkim9362/gemma-en-ko-7b-v0.2", | |
| "daekeun-ml/Llama-2-ko-OpenOrca-gugugo-13B": "https://huggingface.co/daekeun-ml/Llama-2-ko-OpenOrca-gugugo-13B", | |
| "beomi/Yi-Ko-6B": "https://huggingface.co/beomi/Yi-Ko-6B", | |
| "jojo0217/ChatSKKU5.8B": "https://huggingface.co/jojo0217/ChatSKKU5.8B", | |
| "Deepnoid/deep-solar-v2.0.7": "https://huggingface.co/Deepnoid/deep-solar-v2.0.7", | |
| "01-ai/Yi-1.5-9B": "https://huggingface.co/01-ai/Yi-1.5-9B", | |
| "PracticeLLM/Custom-KoLLM-13B-v4": "https://huggingface.co/PracticeLLM/Custom-KoLLM-13B-v4", | |
| "nuebaek/komt_mistral_mss_user_0_max_steps_80": "https://huggingface.co/nuebaek/komt_mistral_mss_user_0_max_steps_80", | |
| "dltjdgh0928/lsh_finetune_v0.11": "https://huggingface.co/dltjdgh0928/lsh_finetune_v0.11", | |
| "shleeeee/mistral-7b-wiki": "https://huggingface.co/shleeeee/mistral-7b-wiki", | |
| "nayohan/polyglot-ko-5.8b-Inst": "https://huggingface.co/nayohan/polyglot-ko-5.8b-Inst", | |
| "ifuseok/sft-solar-10.7b-v1.1": "https://huggingface.co/ifuseok/sft-solar-10.7b-v1.1", | |
| "Junmai/KIT-5.8b": "https://huggingface.co/Junmai/KIT-5.8b", | |
| "heegyu/polyglot-ko-3.8b-chat": "https://huggingface.co/heegyu/polyglot-ko-3.8b-chat", | |
| "etri-xainlp/polyglot-ko-12.8b-instruct": "https://huggingface.co/etri-xainlp/polyglot-ko-12.8b-instruct", | |
| "OpenBuddy/openbuddy-mistral2-7b-v20.3-32k": "https://huggingface.co/OpenBuddy/openbuddy-mistral2-7b-v20.3-32k", | |
| "sh2orc/Llama-3-Korean-8B": "https://huggingface.co/sh2orc/Llama-3-Korean-8B", | |
| "Deepnoid/deep-solar-eeve-v2.0.0": "https://huggingface.co/Deepnoid/deep-solar-eeve-v2.0.0", | |
| "Herry443/Mistral-7B-KNUT-ref": "https://huggingface.co/Herry443/Mistral-7B-KNUT-ref", | |
| "heegyu/polyglot-ko-5.8b-chat": "https://huggingface.co/heegyu/polyglot-ko-5.8b-chat", | |
| "jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.5.3": "https://huggingface.co/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.5.3", | |
| "DILAB-HYU/KoQuality-Polyglot-5.8b": "https://huggingface.co/DILAB-HYU/KoQuality-Polyglot-5.8b", | |
| "Byungchae/k2s3_test_0000": "https://huggingface.co/Byungchae/k2s3_test_0000", | |
| "migtissera/Tess-v2.5-Phi-3-medium-128k-14B": "https://huggingface.co/migtissera/Tess-v2.5-Phi-3-medium-128k-14B", | |
| "kyujinpy/Korean-OpenOrca-13B": "https://huggingface.co/kyujinpy/Korean-OpenOrca-13B", | |
| "kyujinpy/KO-Platypus2-13B": "https://huggingface.co/kyujinpy/KO-Platypus2-13B", | |
| "jin05102518/Astral-7B-Instruct-v0.01": "https://huggingface.co/jin05102518/Astral-7B-Instruct-v0.01", | |
| "Byungchae/k2s3_test_0002": "https://huggingface.co/Byungchae/k2s3_test_0002", | |
| "NousResearch/Nous-Hermes-llama-2-7b": "https://huggingface.co/NousResearch/Nous-Hermes-llama-2-7b", | |
| "kaist-ai/prometheus-13b-v1.0": "https://huggingface.co/kaist-ai/prometheus-13b-v1.0", | |
| "sel303/llama3-diverce-ver1.0": "https://huggingface.co/sel303/llama3-diverce-ver1.0", | |
| "NousResearch/Nous-Capybara-7B": "https://huggingface.co/NousResearch/Nous-Capybara-7B", | |
| "rrw-x2/KoSOLAR-10.7B-DPO-v1.0": "https://huggingface.co/rrw-x2/KoSOLAR-10.7B-DPO-v1.0", | |
| "Edentns/DataVortexS-10.7B-v0.2": "https://huggingface.co/Edentns/DataVortexS-10.7B-v0.2", | |
| "Jsoo/Llama3-beomi-Open-Ko-8B-Instruct-preview-test6": "https://huggingface.co/Jsoo/Llama3-beomi-Open-Ko-8B-Instruct-preview-test6", | |
| "tlphams/gollm-instruct-all-in-one-v1": "https://huggingface.co/tlphams/gollm-instruct-all-in-one-v1", | |
| "Edentns/DataVortexTL-1.1B-v0.1": "https://huggingface.co/Edentns/DataVortexTL-1.1B-v0.1", | |
| "richard-park/llama3-pre1-ds": "https://huggingface.co/richard-park/llama3-pre1-ds", | |
| "ehartford/samantha-1.1-llama-33b": "https://huggingface.co/ehartford/samantha-1.1-llama-33b", | |
| "heegyu/LIMA-13b-hf": "https://huggingface.co/heegyu/LIMA-13b-hf", | |
| "heegyu/42dot_LLM-PLM-1.3B-mt": "https://huggingface.co/heegyu/42dot_LLM-PLM-1.3B-mt", | |
| "shleeeee/mistral-ko-7b-wiki-neft": "https://huggingface.co/shleeeee/mistral-ko-7b-wiki-neft", | |
| "EleutherAI/polyglot-ko-1.3b": "https://huggingface.co/EleutherAI/polyglot-ko-1.3b", | |
| "kyujinpy/Ko-PlatYi-6B-gu": "https://huggingface.co/kyujinpy/Ko-PlatYi-6B-gu", | |
| "sel303/llama3-diverce-ver1.6": "https://huggingface.co/sel303/llama3-diverce-ver1.6" | |
| } | |
| def get_korea_models(): | |
| """Korea ๊ด๋ จ ๋ชจ๋ธ ๊ฒ์""" | |
| params = { | |
| "search": "korea", | |
| "full": "True", | |
| "config": "True", | |
| "limit": 1000 | |
| } | |
| try: | |
| response = requests.get( | |
| "https://huggingface.co/api/models", | |
| headers={'Accept': 'application/json'}, | |
| params=params | |
| ) | |
| if response.status_code == 200: | |
| return response.json() | |
| else: | |
| print(f"Failed to fetch Korea models: {response.status_code}") | |
| return [] | |
| except Exception as e: | |
| print(f"Error fetching Korea models: {str(e)}") | |
| return [] | |
| def get_all_models(limit=3000): | |
| """๋ชจ๋ ๋ชจ๋ธ๊ณผ Korea ๊ด๋ จ ๋ชจ๋ธ ๊ฐ์ ธ์ค๊ธฐ""" | |
| all_models = [] | |
| page_size = 1000 # API์ ํ ๋ฒ ์์ฒญ๋น ์ต๋ ํฌ๊ธฐ | |
| # ์ฌ๋ฌ ํ์ด์ง์ ๊ฑธ์ณ ๋ฐ์ดํฐ ์์ง | |
| for offset in range(0, limit, page_size): | |
| params = { | |
| 'limit': min(page_size, limit - offset), | |
| 'full': 'True', | |
| 'config': 'True', | |
| 'offset': offset | |
| } | |
| response = requests.get( | |
| "https://huggingface.co/api/models", | |
| headers={'Accept': 'application/json'}, | |
| params=params | |
| ) | |
| if response.status_code == 200: | |
| all_models.extend(response.json()) | |
| print(f"Fetched models {offset+1} to {offset+len(response.json())}") | |
| else: | |
| print(f"Failed to fetch models at offset {offset}: {response.status_code}") | |
| break | |
| # Korea ๊ฒ์ ๊ฒฐ๊ณผ๋ ๋์ผํ๊ฒ ํ์ฅ | |
| korea_params = { | |
| "search": "korea", | |
| "full": "True", | |
| "config": "True", | |
| "limit": limit | |
| } | |
| korea_response = requests.get( | |
| "https://huggingface.co/api/models", | |
| headers={'Accept': 'application/json'}, | |
| params=korea_params | |
| ) | |
| if korea_response.status_code == 200: | |
| korea_models = korea_response.json() | |
| print(f"Fetched {len(korea_models)} Korea-related models") | |
| # ์ค๋ณต ์ ๊ฑฐํ๋ฉด์ Korea ๋ชจ๋ธ ์ถ๊ฐ | |
| existing_ids = {model.get('id', '') for model in all_models} | |
| for model in korea_models: | |
| if model.get('id', '') not in existing_ids: | |
| all_models.append(model) | |
| existing_ids.add(model.get('id', '')) | |
| print(f"Total unique models: {len(all_models)}") | |
| return all_models[:limit] | |
| def get_models_data(progress=gr.Progress()): | |
| def calculate_rank(model_id, all_global_models, korea_models): | |
| # ๊ธ๋ก๋ฒ ์์ ํ์ธ | |
| global_rank = next((idx for idx, m in enumerate(all_global_models, 1) | |
| if m.get('id', '').strip() == model_id.strip()), None) | |
| # Korea ๋ชจ๋ธ์ธ ๊ฒฝ์ฐ | |
| is_korea = any(m.get('id', '').strip() == model_id.strip() for m in korea_models) | |
| if is_korea: | |
| # Korea ๋ชจ๋ธ ์ค์์์ ์์ ํ์ธ | |
| korea_rank = next((idx for idx, m in enumerate(korea_models, 1) | |
| if m.get('id', '').strip() == model_id.strip()), None) | |
| if korea_rank: | |
| return min(global_rank or 3001, korea_rank + 1000), True | |
| return global_rank if global_rank else 'Not in top 3000', is_korea | |
| try: | |
| progress(0, desc="Fetching models...") | |
| if not HF_TOKEN: | |
| fig = create_error_plot() | |
| error_html = """ | |
| <div style='padding: 20px; background: #fee; border-radius: 10px; margin: 10px 0;'> | |
| <h3 style='color: #c00;'>โ ๏ธ API ์ธ์ฆ์ด ํ์ํฉ๋๋ค</h3> | |
| <p>HuggingFace API ํ ํฐ์ด ์ค์ ๋์ง ์์์ต๋๋ค. ์์ ํ ๊ธฐ๋ฅ์ ์ฌ์ฉํ๊ธฐ ์ํด์๋ API ํ ํฐ์ด ํ์ํฉ๋๋ค.</p> | |
| </div> | |
| """ | |
| empty_df = pd.DataFrame(columns=['Global Rank', 'Model ID', 'Title', 'Downloads', 'Likes', 'Korea Search', 'URL']) | |
| return fig, error_html, empty_df | |
| # ์ผ๋ฐ ๋ชจ๋ธ๊ณผ Korea ๊ด๋ จ ๋ชจ๋ธ ๋ชจ๋ ๊ฐ์ ธ์ค๊ธฐ (3000์๊น์ง) | |
| all_global_models = get_all_models(limit=3000) | |
| korea_models = get_korea_models() | |
| print(f"Total global models fetched: {len(all_global_models)}") | |
| print(f"Total Korea models fetched: {len(korea_models)}") | |
| # ๋ชจ๋ ๋ชจ๋ธ ํตํฉ (์ค๋ณต ์ ๊ฑฐ) | |
| all_models = all_global_models.copy() | |
| existing_ids = {model.get('id', '') for model in all_global_models} | |
| added_korea_models = 0 | |
| for korea_model in korea_models: | |
| if korea_model.get('id', '') not in existing_ids: | |
| all_models.append(korea_model) | |
| existing_ids.add(korea_model.get('id', '')) | |
| added_korea_models += 1 | |
| print(f"Added {added_korea_models} unique Korea models") | |
| print(f"Total combined models: {len(all_models)}") | |
| # ์๊ฐํ๋ฅผ ์ํ Figure ์์ฑ | |
| fig = go.Figure() | |
| # ์์ ์ ๋ณด ์์ง | |
| filtered_models = [] | |
| for model_id in target_models.keys(): | |
| try: | |
| normalized_id = model_id.strip('/') | |
| model_url_api = f"https://huggingface.co/api/models/{normalized_id}" | |
| response = requests.get( | |
| model_url_api, | |
| headers={'Accept': 'application/json'} | |
| ) | |
| if response.status_code == 200: | |
| model_data = response.json() | |
| rank, is_korea = calculate_rank(model_id, all_global_models, korea_models) | |
| filtered_models.append({ | |
| 'id': model_id, | |
| 'global_rank': rank, | |
| 'downloads': model_data.get('downloads', 0), | |
| 'likes': model_data.get('likes', 0), | |
| 'title': model_data.get('title', 'No Title'), | |
| 'is_korea': is_korea | |
| }) | |
| print(f"Model {model_id}: Rank={rank}, Is Korea={is_korea}") | |
| else: | |
| filtered_models.append({ | |
| 'id': model_id, | |
| 'global_rank': 'Not in top 3000', | |
| 'downloads': 0, | |
| 'likes': 0, | |
| 'title': 'No Title', | |
| 'is_korea': False | |
| }) | |
| except Exception as e: | |
| print(f"Error processing {model_id}: {str(e)}") | |
| continue | |
| # ์์๋ก ์ ๋ ฌ | |
| filtered_models.sort(key=lambda x: float('inf') if isinstance(x['global_rank'], str) else x['global_rank']) | |
| # ์๊ฐํ ๋ฐ์ดํฐ ์ค๋น | |
| valid_models = [m for m in filtered_models if isinstance(m['global_rank'], (int, float))] | |
| if valid_models: | |
| ids = [m['id'] for m in valid_models] | |
| ranks = [m['global_rank'] for m in valid_models] | |
| fig.add_trace(go.Bar( | |
| x=ids, | |
| y=[3001 - r for r in ranks], # Y์ถ ๋ฒ์ 3000๊น์ง ํ์ฅ | |
| text=[f"Rank: #{r}<br>Downloads: {format(m['downloads'], ',')}<br>Likes: {format(m['likes'], ',')}" | |
| for r, m in zip(ranks, valid_models)], | |
| textposition='auto', | |
| marker_color=['rgba(255,0,0,0.6)' if m['is_korea'] else 'rgba(0,0,255,0.6)' | |
| for m in valid_models], | |
| opacity=0.8 | |
| )) | |
| fig.update_layout( | |
| title="HuggingFace Models Global Rankings (Up to #3000)", | |
| xaxis_title="Model ID", | |
| yaxis_title="Global Rank", | |
| yaxis=dict( | |
| ticktext=[f"#{i}" for i in range(1, 3001, 100)], | |
| tickvals=[3001 - i for i in range(1, 3001, 100)], | |
| range=[0, 3000] | |
| ), | |
| height=800, | |
| showlegend=False, | |
| template='plotly_white', | |
| xaxis_tickangle=-45 | |
| ) | |
| # HTML ์นด๋ ์์ฑ | |
| html_content = """ | |
| <div style='padding: 20px; background: #f5f5f5;'> | |
| <h2 style='color: #2c3e50;'>Models Rankings (Up to #3000)</h2> | |
| <div style='display: grid; grid-template-columns: repeat(auto-fill, minmax(300px, 1fr)); gap: 20px;'> | |
| """ | |
| for model in filtered_models: | |
| rank_display = f"Global Rank #{model['global_rank']}" if isinstance(model['global_rank'], (int, float)) else "Not in top 3000" | |
| korea_badge = "๐ฐ๐ท Korea Search Result" if model['is_korea'] else "" | |
| html_content += f""" | |
| <div style=' | |
| background: white; | |
| padding: 20px; | |
| border-radius: 10px; | |
| box-shadow: 0 2px 4px rgba(0,0,0,0.1); | |
| transition: transform 0.2s; | |
| {f"border: 2px solid #e74c3c;" if model['is_korea'] else ""} | |
| '> | |
| <h3 style='color: #34495e;'>{rank_display}</h3> | |
| <h4 style='color: #2c3e50;'>{model['id']}</h4> | |
| <p style='color: #e74c3c; font-weight: bold;'>{korea_badge}</p> | |
| <p style='color: #7f8c8d;'>โฌ๏ธ Downloads: {format(model['downloads'], ',')}</p> | |
| <p style='color: #7f8c8d;'>๐ Likes: {format(model['likes'], ',')}</p> | |
| <a href='{target_models[model['id']]}' | |
| target='_blank' | |
| style=' | |
| display: inline-block; | |
| padding: 8px 16px; | |
| background: #3498db; | |
| color: white; | |
| text-decoration: none; | |
| border-radius: 5px; | |
| transition: background 0.3s; | |
| '> | |
| Visit Model ๐ | |
| </a> | |
| </div> | |
| """ | |
| html_content += "</div></div>" | |
| # ๋ฐ์ดํฐํ๋ ์ ์์ฑ | |
| df = pd.DataFrame([{ | |
| 'Global Rank': f"#{m['global_rank']}" if isinstance(m['global_rank'], (int, float)) else m['global_rank'], | |
| 'Model ID': m['id'], | |
| 'Title': m['title'], | |
| 'Downloads': format(m['downloads'], ','), | |
| 'Likes': format(m['likes'], ','), | |
| 'Korea Search': '๐ฐ๐ท' if m['is_korea'] else '', | |
| 'URL': target_models[m['id']] | |
| } for m in filtered_models]) | |
| progress(1.0, desc="Complete!") | |
| return fig, html_content, df | |
| except Exception as e: | |
| print(f"Error in get_models_data: {str(e)}") | |
| error_fig = create_error_plot() | |
| error_html = f""" | |
| <div style='padding: 20px; background: #fee; border-radius: 10px; margin: 10px 0;'> | |
| <h3 style='color: #c00;'>โ ๏ธ ์ค๋ฅ๊ฐ ๋ฐ์ํ์ต๋๋ค</h3> | |
| <p>{str(e)}</p> | |
| </div> | |
| """ | |
| empty_df = pd.DataFrame(columns=['Global Rank', 'Model ID', 'Title', 'Downloads', 'Likes', 'Korea Search', 'URL']) | |
| return error_fig, error_html, empty_df | |
| # ๊ด์ฌ ์คํ์ด์ค URL ๋ฆฌ์คํธ์ ์ ๋ณด | |
| target_spaces = { | |
| "VIDraft/ChemGenesis": "https://huggingface.co/spaces/VIDraft/ChemGenesis", | |
| "seawolf2357/ocrlatex": "https://huggingface.co/spaces/seawolf2357/ocrlatex", | |
| "seawolf2357/img2vid": "https://huggingface.co/spaces/seawolf2357/img2vid", | |
| "seawolf2357/sd-prompt-gen": "https://huggingface.co/spaces/seawolf2357/sd-prompt-gen", | |
| "openfree/badassgi": "https://huggingface.co/spaces/openfree/badassgi", | |
| "openfree/tarotcard": "https://huggingface.co/spaces/openfree/tarotcard", | |
| "openfree/drqxab": "https://huggingface.co/spaces/openfree/drqxab", | |
| "aiqcamp/Polaroid": "https://huggingface.co/spaces/aiqcamp/Polaroid", | |
| "ginigen/cartoon": "https://huggingface.co/spaces/ginigen/cartoon", | |
| "ginigen/Book-Cover": "https://huggingface.co/spaces/ginigen/Book-Cover", | |
| "aiqcamp/fash": "https://huggingface.co/spaces/aiqcamp/fash", | |
| "gunship999/Korea-Daily-News": "https://huggingface.co/spaces/gunship999/Korea-Daily-News", | |
| "kolaslab/Quantum": "https://huggingface.co/spaces/kolaslab/Quantum", | |
| "openfree/webtoon": "https://huggingface.co/spaces/openfree/webtoon", | |
| "immunobiotech/ChicagoGallery": "https://huggingface.co/spaces/immunobiotech/ChicagoGallery", | |
| "immunobiotech/MetropolitanMuseum": "https://huggingface.co/spaces/immunobiotech/MetropolitanMuseum", | |
| "immunobiotech/opensky": "https://huggingface.co/spaces/immunobiotech/opensky", | |
| "kolaslab/Audio-Visualizer": "https://huggingface.co/spaces/kolaslab/Audio-Visualizer", | |
| "kolaslab/Radio-Learning": "https://huggingface.co/spaces/kolaslab/Radio-Learning", | |
| "kolaslab/Future-Gallaxy": "https://huggingface.co/spaces/kolaslab/Future-Gallaxy", | |
| "openfree/ProteinGenesis": "https://huggingface.co/spaces/openfree/ProteinGenesis", | |
| "openfree/2025saju": "https://huggingface.co/spaces/openfree/2025saju", | |
| "ginigen/Dokdo-membership": "https://huggingface.co/spaces/ginigen/Dokdo-membership", | |
| "VIDraft/eum": "https://huggingface.co/spaces/VIDraft/eum", | |
| "kolaslab/VisionART": "https://huggingface.co/spaces/kolaslab/VisionART", | |
| "aiqtech/FLUX-military": "https://huggingface.co/spaces/aiqtech/FLUX-military", | |
| "fantaxy/Rolls-Royce": "https://huggingface.co/spaces/fantaxy/Rolls-Royce", | |
| "seawolf2357/flux-korea-hanbok-lora": "https://huggingface.co/spaces/seawolf2357/flux-korea-hanbok-lora", | |
| "seawolf2357/flux-korea-palace-lora": "https://huggingface.co/spaces/seawolf2357/flux-korea-palace-lora", | |
| "aiqcamp/flux-cat-lora": "https://huggingface.co/spaces/aiqcamp/flux-cat-lora", | |
| "gunship999/SexyImages": "https://huggingface.co/spaces/gunship999/SexyImages", | |
| "aiqtech/flux-claude-monet-lora": "https://huggingface.co/spaces/aiqtech/flux-claude-monet-lora", | |
| "ginigen/CANVAS-o3": "https://huggingface.co/spaces/ginigen/CANVAS-o3", | |
| "kolaslab/world-sdr": "https://huggingface.co/spaces/kolaslab/world-sdr", | |
| "seawolf2357/3D-Avatar-Generator": "https://huggingface.co/spaces/seawolf2357/3D-Avatar-Generator", | |
| "fantaxy/playground25": "https://huggingface.co/spaces/fantaxy/playground25", | |
| "openfree/ultpixgen": "https://huggingface.co/spaces/openfree/ultpixgen", | |
| "kolaslab/VISION-NIGHT": "https://huggingface.co/spaces/kolaslab/VISION-NIGHT", | |
| "kolaslab/FLUX-WEB": "https://huggingface.co/spaces/kolaslab/FLUX-WEB", | |
| "seawolf2357/REALVISXL-V5": "https://huggingface.co/spaces/seawolf2357/REALVISXL-V5", | |
| "ginipick/Dokdo-multimodal": "https://huggingface.co/spaces/ginipick/Dokdo-multimodal", | |
| "ginigen/theater": "https://huggingface.co/spaces/ginigen/theater", | |
| "VIDraft/stock": "https://huggingface.co/spaces/VIDraft/stock", | |
| "fantos/flxcontrol": "https://huggingface.co/spaces/fantos/flxcontrol", | |
| "fantos/textcutobject": "https://huggingface.co/spaces/fantos/textcutobject", | |
| "ginipick/FLUX-Prompt-Generator": "https://huggingface.co/spaces/ginipick/FLUX-Prompt-Generator", | |
| "fantaxy/flxloraexp": "https://huggingface.co/spaces/fantaxy/flxloraexp", | |
| "fantos/flxloraexp": "https://huggingface.co/spaces/fantos/flxloraexp", | |
| "seawolf2357/flxloraexp": "https://huggingface.co/spaces/seawolf2357/flxloraexp", | |
| "ginipick/flxloraexp": "https://huggingface.co/spaces/ginipick/flxloraexp", | |
| "ginipick/FLUX-Prompt-Generator": "https://huggingface.co/spaces/ginipick/FLUX-Prompt-Generator", | |
| "ginigen/Dokdo": "https://huggingface.co/spaces/ginigen/Dokdo", | |
| "aiqcamp/imagemagic": "https://huggingface.co/spaces/aiqcamp/imagemagic", | |
| "openfree/ColorRevive": "https://huggingface.co/spaces/openfree/ColorRevive", | |
| "VIDraft/RAGOndevice": "https://huggingface.co/spaces/VIDraft/RAGOndevice", | |
| "gunship999/Radar-Bluetooth": "https://huggingface.co/spaces/gunship999/Radar-Bluetooth", | |
| "gunship999/WiFi-VISION": "https://huggingface.co/spaces/gunship999/WiFi-VISION", | |
| "gunship999/SONAR-Radar": "https://huggingface.co/spaces/gunship999/SONAR-Radar", | |
| "aiqcamp/AudioLlama": "https://huggingface.co/spaces/aiqcamp/AudioLlama", | |
| "ginigen/FLUXllama-Multilingual": "https://huggingface.co/spaces/ginigen/FLUXllama-Multilingual", | |
| "ginipick/ginimedi": "https://huggingface.co/spaces/ginipick/ginimedi", | |
| "ginipick/ginilaw": "https://huggingface.co/spaces/ginipick/ginilaw", | |
| "ginipick/ginipharm": "https://huggingface.co/spaces/ginipick/ginipharm", | |
| "ginipick/FitGen": "https://huggingface.co/spaces/ginipick/FitGen", | |
| "fantaxy/FLUX-Animations": "https://huggingface.co/spaces/fantaxy/FLUX-Animations", | |
| "fantaxy/Remove-Video-Background": "https://huggingface.co/spaces/fantaxy/Remove-Video-Background", | |
| "fantaxy/ofai-flx-logo": "https://huggingface.co/spaces/fantaxy/ofai-flx-logo", | |
| "fantaxy/flx-pulid": "https://huggingface.co/spaces/fantaxy/flx-pulid", | |
| "fantaxy/flx-upscale": "https://huggingface.co/spaces/fantaxy/flx-upscale", | |
| "aiqcamp/Fashion-FLUX": "https://huggingface.co/spaces/aiqcamp/Fashion-FLUX", | |
| "ginipick/StyleGen": "https://huggingface.co/spaces/ginipick/StyleGen", | |
| "openfree/StoryStar": "https://huggingface.co/spaces/openfree/StoryStar", | |
| "fantos/x-mas": "https://huggingface.co/spaces/fantos/x-mas", | |
| "openfree/Korean-Leaderboard": "https://huggingface.co/spaces/openfree/Korean-Leaderboard", | |
| "ginipick/FLUXllama": "https://huggingface.co/spaces/ginipick/FLUXllama", | |
| "ginipick/SORA-3D": "https://huggingface.co/spaces/ginipick/SORA-3D", | |
| "fantaxy/Sound-AI-SFX": "https://huggingface.co/spaces/fantaxy/Sound-AI-SFX", | |
| "fantos/flx8lora": "https://huggingface.co/spaces/fantos/flx8lora", | |
| "ginigen/Canvas": "https://huggingface.co/spaces/ginigen/Canvas", | |
| "fantaxy/erotica": "https://huggingface.co/spaces/fantaxy/erotica", | |
| "ginipick/time-machine": "https://huggingface.co/spaces/ginipick/time-machine", | |
| "aiqcamp/FLUX-VisionReply": "https://huggingface.co/spaces/aiqcamp/FLUX-VisionReply", | |
| "openfree/Tetris-Game": "https://huggingface.co/spaces/openfree/Tetris-Game", | |
| "openfree/everychat": "https://huggingface.co/spaces/openfree/everychat", | |
| "VIDraft/mouse1": "https://huggingface.co/spaces/VIDraft/mouse1", | |
| "kolaslab/alpha-go": "https://huggingface.co/spaces/kolaslab/alpha-go", | |
| "ginipick/text3d": "https://huggingface.co/spaces/ginipick/text3d", | |
| "openfree/trending-board": "https://huggingface.co/spaces/openfree/trending-board", | |
| "cutechicken/tankwar": "https://huggingface.co/spaces/cutechicken/tankwar", | |
| "openfree/game-jewel": "https://huggingface.co/spaces/openfree/game-jewel", | |
| "VIDraft/mouse-chat": "https://huggingface.co/spaces/VIDraft/mouse-chat", | |
| "ginipick/AccDiffusion": "https://huggingface.co/spaces/ginipick/AccDiffusion", | |
| "aiqtech/Particle-Accelerator-Simulation": "https://huggingface.co/spaces/aiqtech/Particle-Accelerator-Simulation", | |
| "openfree/GiniGEN": "https://huggingface.co/spaces/openfree/GiniGEN", | |
| "kolaslab/3DAudio-Spectrum-Analyzer": "https://huggingface.co/spaces/kolaslab/3DAudio-Spectrum-Analyzer", | |
| "openfree/trending-news-24": "https://huggingface.co/spaces/openfree/trending-news-24", | |
| "ginipick/Realtime-FLUX": "https://huggingface.co/spaces/ginipick/Realtime-FLUX", | |
| "VIDraft/prime-number": "https://huggingface.co/spaces/VIDraft/prime-number", | |
| "kolaslab/zombie-game": "https://huggingface.co/spaces/kolaslab/zombie-game", | |
| "fantos/miro-game": "https://huggingface.co/spaces/fantos/miro-game", | |
| "kolaslab/shooting": "https://huggingface.co/spaces/kolaslab/shooting", | |
| "VIDraft/Mouse-Hackathon": "https://huggingface.co/spaces/VIDraft/Mouse-Hackathon", | |
| "aiqmaster/stocksimulation": "https://huggingface.co/spaces/aiqmaster/stocksimulation", | |
| "aiqmaster/assetai": "https://huggingface.co/spaces/aiqmaster/assetai", | |
| "aiqmaster/stockai": "https://huggingface.co/spaces/aiqmaster/stockai", | |
| "cutechicken/TankWar3D": "https://huggingface.co/spaces/cutechicken/TankWar3D", | |
| "kolaslab/RC4-EnDecoder": "https://huggingface.co/spaces/kolaslab/RC4-EnDecoder", | |
| "kolaslab/simulator": "https://huggingface.co/spaces/kolaslab/simulator", | |
| "kolaslab/calculator": "https://huggingface.co/spaces/kolaslab/calculator", | |
| "aiqtech/kofaceid": "https://huggingface.co/spaces/aiqtech/kofaceid", | |
| "fantaxy/fastvideogena": "https://huggingface.co/spaces/fantaxy/fastvideogen", | |
| "fantos/cogvidx": "https://huggingface.co/spaces/fantos/cogvidx", | |
| "fantos/flxfashmodel": "https://huggingface.co/spaces/fantos/flxfashmodel", | |
| "fantos/kolcontrl": "https://huggingface.co/spaces/fantos/kolcontrl", | |
| "fantos/EveryText": "https://huggingface.co/spaces/fantos/EveryText", | |
| "aiqtech/cinevid": "https://huggingface.co/spaces/aiqtech/cinevid", | |
| "aiqtech/FLUX-Ghibli-Studio-LoRA": "https://huggingface.co/spaces/aiqtech/FLUX-Ghibli-Studio-LoRA", | |
| "aiqtech/flxgif": "https://huggingface.co/spaces/aiqtech/flxgif", | |
| "aiqtech/imaginpaint": "https://huggingface.co/spaces/aiqtech/imaginpaint", | |
| "upstage/open-ko-llm-leaderboard": "https://huggingface.co/spaces/upstage/open-ko-llm-leaderboard", | |
| "LGAI-EXAONE/EXAONE-3.5-Instruct-Demo": "https://huggingface.co/spaces/LGAI-EXAONE/EXAONE-3.5-Instruct-Demo", | |
| "LeeSangHoon/HierSpeech_TTS": "https://huggingface.co/spaces/LeeSangHoon/HierSpeech_TTS", | |
| "etri-vilab/Ko-LLaVA": "https://huggingface.co/spaces/etri-vilab/Ko-LLaVA", | |
| "etri-vilab/KOALA": "https://huggingface.co/spaces/etri-vilab/KOALA", | |
| "naver-clova-ix/donut-base-finetuned-cord-v2": "https://huggingface.co/spaces/naver-clova-ix/donut-base-finetuned-cord-v2", | |
| "NCSOFT/VARCO_Arena": "https://huggingface.co/spaces/NCSOFT/VARCO_Arena" | |
| } | |
| def get_spaces_data(sort_type="trending", progress=gr.Progress()): | |
| """์คํ์ด์ค ๋ฐ์ดํฐ ๊ฐ์ ธ์ค๊ธฐ (trending ๋๋ modes)""" | |
| url = "https://huggingface.co/api/spaces" | |
| params = { | |
| 'full': 'true', | |
| 'limit': 500 | |
| } | |
| if sort_type == "modes": | |
| params['sort'] = 'likes' | |
| try: | |
| progress(0, desc=f"Fetching {sort_type} spaces data...") | |
| response = requests.get(url, params=params) | |
| response.raise_for_status() | |
| all_spaces = response.json() | |
| # ์์ ์ ๋ณด ์ ์ฅ | |
| space_ranks = {} | |
| for idx, space in enumerate(all_spaces, 1): | |
| space_id = space.get('id', '') | |
| if space_id in target_spaces: | |
| space['rank'] = idx | |
| space_ranks[space_id] = space | |
| spaces = [space_ranks[space_id] for space_id in space_ranks.keys()] | |
| spaces.sort(key=lambda x: x['rank']) | |
| progress(0.3, desc="Creating visualization...") | |
| # ์๊ฐํ ์์ฑ | |
| fig = go.Figure() | |
| # ๋ฐ์ดํฐ ์ค๋น | |
| ids = [space['id'] for space in spaces] | |
| ranks = [space['rank'] for space in spaces] | |
| likes = [space.get('likes', 0) for space in spaces] | |
| titles = [space.get('cardData', {}).get('title') or space.get('title', 'No Title') for space in spaces] | |
| # ๋ง๋ ๊ทธ๋ํ ์์ฑ | |
| fig.add_trace(go.Bar( | |
| x=ids, | |
| y=ranks, | |
| text=[f"Rank: {r}<br>Title: {t}<br>Likes: {l}" | |
| for r, t, l in zip(ranks, titles, likes)], | |
| textposition='auto', | |
| marker_color='rgb(158,202,225)', | |
| opacity=0.8 | |
| )) | |
| fig.update_layout( | |
| title={ | |
| 'text': f'Hugging Face Spaces {sort_type.title()} Rankings (Top 500)', | |
| 'y':0.95, | |
| 'x':0.5, | |
| 'xanchor': 'center', | |
| 'yanchor': 'top' | |
| }, | |
| xaxis_title='Space ID', | |
| yaxis_title='Rank', | |
| yaxis=dict( | |
| autorange='reversed', # Y์ถ์ ๋ฐ์ | |
| tickmode='array', | |
| ticktext=[str(i) for i in range(1, 501, 20)], # 1๋ถํฐ 400๊น์ง 20 ๊ฐ๊ฒฉ์ผ๋ก ํ์ | |
| tickvals=[i for i in range(1, 501, 20)], | |
| range=[1, 500] # Y์ถ ๋ฒ์๋ฅผ 1๋ถํฐ 400๊น์ง๋ก ์ค์ | |
| ), | |
| height=800, | |
| showlegend=False, | |
| template='plotly_white', | |
| xaxis_tickangle=-45 | |
| ) | |
| progress(0.6, desc="Creating space cards...") | |
| # HTML ์นด๋ ์์ฑ | |
| html_content = f""" | |
| <div style='padding: 20px; background: #f5f5f5;'> | |
| <h2 style='color: #2c3e50;'>{sort_type.title()} Rankings</h2> | |
| <div style='display: grid; grid-template-columns: repeat(auto-fill, minmax(300px, 1fr)); gap: 20px;'> | |
| """ | |
| for space in spaces: | |
| space_id = space['id'] | |
| rank = space['rank'] | |
| title = space.get('cardData', {}).get('title') or space.get('title', 'No Title') | |
| likes = space.get('likes', 0) | |
| html_content += f""" | |
| <div style=' | |
| background: white; | |
| padding: 20px; | |
| border-radius: 10px; | |
| box-shadow: 0 2px 4px rgba(0,0,0,0.1); | |
| transition: transform 0.2s; | |
| '> | |
| <h3 style='color: #34495e;'>Rank #{rank} - {space_id}</h3> | |
| <h4 style=' | |
| color: #2980b9; | |
| margin: 10px 0; | |
| font-size: 1.2em; | |
| font-weight: bold; | |
| text-shadow: 1px 1px 2px rgba(0,0,0,0.1); | |
| background: linear-gradient(to right, #3498db, #2980b9); | |
| -webkit-background-clip: text; | |
| -webkit-text-fill-color: transparent; | |
| padding: 5px 0; | |
| '>{title}</h4> | |
| <p style='color: #7f8c8d; margin-bottom: 10px;'>๐ Likes: {likes}</p> | |
| <a href='{target_spaces[space_id]}' | |
| target='_blank' | |
| style=' | |
| display: inline-block; | |
| padding: 8px 16px; | |
| background: #3498db; | |
| color: white; | |
| text-decoration: none; | |
| border-radius: 5px; | |
| transition: background 0.3s; | |
| '> | |
| Visit Space ๐ | |
| </a> | |
| </div> | |
| """ | |
| html_content += "</div></div>" | |
| # ๋ฐ์ดํฐํ๋ ์ ์์ฑ | |
| df = pd.DataFrame([{ | |
| 'Rank': space['rank'], | |
| 'Space ID': space['id'], | |
| 'Title': space.get('cardData', {}).get('title') or space.get('title', 'No Title'), | |
| 'Likes': space.get('likes', 0), | |
| 'URL': target_spaces[space['id']] | |
| } for space in spaces]) | |
| progress(1.0, desc="Complete!") | |
| return fig, html_content, df | |
| except Exception as e: | |
| print(f"Error in get_spaces_data: {str(e)}") | |
| error_html = f'<div style="color: red; padding: 20px;">Error: {str(e)}</div>' | |
| error_plot = create_error_plot() | |
| return error_plot, error_html, pd.DataFrame() | |
| def create_trend_visualization(spaces_data): | |
| if not spaces_data: | |
| return create_error_plot() | |
| fig = go.Figure() | |
| # ์์ ๋ฐ์ดํฐ ์ค๋น | |
| ranks = [] | |
| for idx, space in enumerate(spaces_data, 1): | |
| space_id = space.get('id', '') | |
| if space_id in target_spaces: | |
| ranks.append({ | |
| 'id': space_id, | |
| 'rank': idx, | |
| 'likes': space.get('likes', 0), | |
| 'title': space.get('title', 'N/A'), | |
| 'views': space.get('views', 0) | |
| }) | |
| if not ranks: | |
| return create_error_plot() | |
| # ์์๋ณ๋ก ์ ๋ ฌ | |
| ranks.sort(key=lambda x: x['rank']) | |
| # ํ๋กฏ ๋ฐ์ดํฐ ์์ฑ | |
| ids = [r['id'] for r in ranks] | |
| rank_values = [r['rank'] for r in ranks] | |
| likes = [r['likes'] for r in ranks] | |
| views = [r['views'] for r in ranks] | |
| # ๋ง๋ ๊ทธ๋ํ ์์ฑ | |
| fig.add_trace(go.Bar( | |
| x=ids, | |
| y=rank_values, | |
| text=[f"Rank: {r}<br>Likes: {l}<br>Views: {v}" for r, l, v in zip(rank_values, likes, views)], | |
| textposition='auto', | |
| marker_color='rgb(158,202,225)', | |
| opacity=0.8 | |
| )) | |
| fig.update_layout( | |
| title={ | |
| 'text': 'Current Trending Ranks (All Target Spaces)', | |
| 'y':0.95, | |
| 'x':0.5, | |
| 'xanchor': 'center', | |
| 'yanchor': 'top' | |
| }, | |
| xaxis_title='Space ID', | |
| yaxis_title='Trending Rank', | |
| yaxis_autorange='reversed', | |
| height=800, | |
| showlegend=False, | |
| template='plotly_white', | |
| xaxis_tickangle=-45 | |
| ) | |
| return fig | |
| # ํ ํฐ์ด ์๋ ๊ฒฝ์ฐ๋ฅผ ์ํ ๋์ฒด ํจ์ | |
| def get_trending_spaces_without_token(): | |
| try: | |
| url = "https://huggingface.co/api/spaces" | |
| params = { | |
| 'sort': 'likes', | |
| 'direction': -1, | |
| 'limit': 500, | |
| 'full': 'true' | |
| } | |
| response = requests.get(url, params=params) | |
| if response.status_code == 200: | |
| return response.json() | |
| else: | |
| print(f"API ์์ฒญ ์คํจ (ํ ํฐ ์์): {response.status_code}") | |
| print(f"Response: {response.text}") | |
| return None | |
| except Exception as e: | |
| print(f"API ํธ์ถ ์ค ์๋ฌ ๋ฐ์ (ํ ํฐ ์์): {str(e)}") | |
| return None | |
| # Select which API function to use based on whether HF_TOKEN is set | |
| if not HF_TOKEN: | |
| get_trending_spaces = get_trending_spaces_without_token | |
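| # create_error_plot returns a placeholder figure shown whenever the API data cannot be loaded. | |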
| def create_error_plot(): | |
| fig = go.Figure() | |
| fig.add_annotation( | |
| text="๋ฐ์ดํฐ๋ฅผ ๋ถ๋ฌ์ฌ ์ ์์ต๋๋ค.\n(API ์ธ์ฆ์ด ํ์ํฉ๋๋ค)", | |
| xref="paper", | |
| yref="paper", | |
| x=0.5, | |
| y=0.5, | |
| showarrow=False, | |
| font=dict(size=20) | |
| ) | |
| fig.update_layout( | |
| title="Error Loading Data", | |
| height=400 | |
| ) | |
| return fig | |
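| # create_space_info_html renders a card grid with one card per tracked space: the card shows | |
| # the space's current trending rank when it is found in the response, or a greyed-out | |
| # "Not in trending" card otherwise. | |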
| def create_space_info_html(spaces_data): | |
| if not spaces_data: | |
| return "<div style='padding: 20px;'><h2>๋ฐ์ดํฐ๋ฅผ ๋ถ๋ฌ์ค๋๋ฐ ์คํจํ์ต๋๋ค.</h2></div>" | |
| html_content = """ | |
| <div style='padding: 20px;'> | |
| <h2 style='color: #2c3e50;'>Current Trending Rankings</h2> | |
| <div style='display: grid; grid-template-columns: repeat(auto-fill, minmax(300px, 1fr)); gap: 20px;'> | |
| """ | |
| # Iterate over all target_spaces so every tracked space appears in the grid | |
| for space_id in target_spaces.keys(): | |
| space_info = next((s for s in spaces_data if s.get('id') == space_id), None) | |
| if space_info: | |
| rank = next((idx for idx, s in enumerate(spaces_data, 1) if s.get('id') == space_id), 'N/A') | |
| html_content += f""" | |
| <div style=' | |
| background: white; | |
| padding: 20px; | |
| border-radius: 10px; | |
| box-shadow: 0 2px 4px rgba(0,0,0,0.1); | |
| transition: transform 0.2s; | |
| '> | |
| <h3 style='color: #34495e;'>#{rank} - {space_id}</h3> | |
| <p style='color: #7f8c8d;'>Likes: {space_info.get('likes', 'N/A')}</p> | |
| <p style='color: #7f8c8d;'>Views: {space_info.get('views', 'N/A')}</p> | |
| <p style='color: #2c3e50;'>{space_info.get('title', 'N/A')}</p> | |
| <p style='color: #7f8c8d; font-size: 0.9em;'>{(space_info.get('description') or 'N/A')[:100]}...</p> | |
| <a href='{target_spaces[space_id]}' | |
| target='_blank' | |
| style=' | |
| display: inline-block; | |
| padding: 8px 16px; | |
| background: #3498db; | |
| color: white; | |
| text-decoration: none; | |
| border-radius: 5px; | |
| transition: background 0.3s; | |
| '> | |
| Visit Space | |
| </a> | |
| </div> | |
| """ | |
| else: | |
| html_content += f""" | |
| <div style=' | |
| background: #f8f9fa; | |
| padding: 20px; | |
| border-radius: 10px; | |
| box-shadow: 0 2px 4px rgba(0,0,0,0.1); | |
| '> | |
| <h3 style='color: #34495e;'>{space_id}</h3> | |
| <p style='color: #7f8c8d;'>Not in trending</p> | |
| <a href='{target_spaces[space_id]}' | |
| target='_blank' | |
| style=' | |
| display: inline-block; | |
| padding: 8px 16px; | |
| background: #95a5a6; | |
| color: white; | |
| text-decoration: none; | |
| border-radius: 5px; | |
| '> | |
| Visit Space | |
| </a> | |
| </div> | |
| """ | |
| html_content += "</div></div>" | |
| return html_content | |
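| # create_data_table flattens the trending response into a DataFrame restricted to the | |
| # entries listed in target_spaces. | |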
| def create_data_table(spaces_data): | |
| if not spaces_data: | |
| return pd.DataFrame() | |
| rows = [] | |
| for idx, space in enumerate(spaces_data, 1): | |
| space_id = space.get('id', '') | |
| if space_id in target_spaces: | |
| rows.append({ | |
| 'Rank': idx, | |
| 'Space ID': space_id, | |
| 'Likes': space.get('likes', 'N/A'), | |
| 'Title': space.get('title', 'N/A'), | |
| 'URL': target_spaces[space_id] | |
| }) | |
| return pd.DataFrame(rows) | |
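| # Note: refresh_data below is a Spaces-only refresh helper; it is not wired to the UI in | |
| # this file, where the refresh button uses refresh_all_data (defined further down). | |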
| def refresh_data(): | |
| spaces_data = get_trending_spaces() | |
| if spaces_data: | |
| plot = create_trend_visualization(spaces_data) | |
| info = create_space_info_html(spaces_data) | |
| df = create_data_table(spaces_data) | |
| return plot, info, df | |
| else: | |
| return create_error_plot(), "<div>API authentication is required.</div>", pd.DataFrame() | |
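| # create_registration_bar_chart counts tracked entries per creator (the part of the ID | |
| # before the slash) within the Top-N cutoff. It accepts either the DataFrames built above | |
| # ('Rank'/'Space ID' or 'Global Rank'/'Model ID' columns) or a list of dicts with | |
| # 'rank'/'global_rank' and 'id' keys. | |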
| def create_registration_bar_chart(data, type_name="Spaces"): | |
| try: | |
| # Set the Top-N cutoff | |
| top_limit = 500 if type_name == "Spaces" else 3000 | |
| # Handle DataFrame input | |
| if isinstance(data, pd.DataFrame): | |
| if type_name == "Models": | |
| # Keep only models ranked within the top 3000 | |
| data = data[data['Global Rank'].apply(lambda x: isinstance(x, (int, float)) or (isinstance(x, str) and x.startswith('#')))] | |
| data = data[data['Global Rank'].apply(lambda x: int(str(x).replace('#', '')) if isinstance(x, str) else x) <= top_limit] | |
| elif type_name == "Spaces": | |
| # Keep only spaces ranked within the top 500 | |
| data = data[data['Rank'].apply(lambda x: isinstance(x, (int, float))) & (data['Rank'] <= top_limit)] | |
| # Choose the ID column | |
| id_column = 'Space ID' if type_name == "Spaces" else 'Model ID' | |
| registrations = data[id_column].apply(lambda x: x.split('/')[0]).value_counts() | |
| else: | |
| # Handle list or other data formats | |
| registrations = {} | |
| for item in data: | |
| if isinstance(item, dict): | |
| rank = item.get('global_rank' if type_name == "Models" else 'rank') | |
| if not isinstance(rank, (int, float)) or rank > top_limit: | |
| continue | |
| creator = item.get('id', '').split('/')[0] | |
| registrations[creator] = registrations.get(creator, 0) + 1 | |
| registrations = pd.Series(registrations) | |
| # Sort the counts in descending order | |
| registrations = registrations.sort_values(ascending=False) | |
| fig = go.Figure(data=[go.Bar( | |
| x=registrations.index, | |
| y=registrations.values, | |
| text=registrations.values, | |
| textposition='auto', | |
| marker_color='#FF6B6B' | |
| )]) | |
| fig.update_layout( | |
| title=f"Korean {type_name} Registrations by Creator (Top {top_limit})", | |
| xaxis_title="Creator ID", | |
| yaxis_title="Number of Registrations", | |
| showlegend=False, | |
| height=400, | |
| width=700 | |
| ) | |
| return fig | |
| except Exception as e: | |
| print(f"Error in create_registration_bar_chart: {str(e)}") | |
| return go.Figure() | |
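| # create_pie_chart shows the share of tracked Korean entries among the Top-N total passed | |
| # in as total_count by the caller (500 for Spaces, 3000 for Models). | |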
| def create_pie_chart(data, total_count, type_name="Spaces"): | |
| try: | |
| # Set the Top-N cutoff | |
| top_limit = 500 if type_name == "Spaces" else 3000 | |
| # Handle DataFrame input | |
| if isinstance(data, pd.DataFrame): | |
| if type_name == "Models": | |
| # Keep only models ranked within the top 3000 | |
| data = data[data['Global Rank'].apply(lambda x: isinstance(x, (int, float)) or (isinstance(x, str) and x.startswith('#')))] | |
| data = data[data['Global Rank'].apply(lambda x: int(str(x).replace('#', '')) if isinstance(x, str) else x) <= top_limit] | |
| elif type_name == "Spaces": | |
| # Keep only spaces ranked within the top 500 | |
| data = data[data['Rank'].apply(lambda x: isinstance(x, (int, float))) & (data['Rank'] <= top_limit)] | |
| korean_count = len(data) | |
| else: | |
| # Handle list or other data formats | |
| if type_name == "Models": | |
| korean_count = sum(1 for item in data if isinstance(item.get('global_rank'), (int, float)) and item.get('global_rank') <= top_limit) | |
| else: | |
| korean_count = sum(1 for item in data if isinstance(item.get('rank'), (int, float)) and item.get('rank') <= top_limit) | |
| other_count = total_count - korean_count | |
| fig = go.Figure(data=[go.Pie( | |
| labels=[f'Korean {type_name} in Top {top_limit}', f'Other {type_name} in Top {top_limit}'], | |
| values=[korean_count, other_count], | |
| hole=.3, | |
| marker_colors=['#FF6B6B', '#4ECDC4'], | |
| textinfo='percent+value', | |
| hovertemplate="<b>%{label}</b><br>" + | |
| "Count: %{value}<br>" + | |
| "Percentage: %{percent}<br>" | |
| )]) | |
| fig.update_layout( | |
| title=f"Korean vs Other {type_name} Distribution (Top {top_limit})", | |
| showlegend=True, | |
| height=400, | |
| width=500 | |
| ) | |
| return fig | |
| except Exception as e: | |
| print(f"Error in create_pie_chart: {str(e)}") | |
| return go.Figure() | |
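| # Note: this module-level refresh_all_data is shadowed by the identically named function | |
| # defined inside the gr.Blocks context below (which adds error handling); the later | |
| # definition is the one actually bound to the refresh button. | |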
| def refresh_all_data(): | |
| spaces_results = get_spaces_data("trending") | |
| models_results = get_models_data() | |
| # Create the Spaces charts | |
| spaces_pie = create_pie_chart(spaces_results[2], 500, "Spaces") | |
| spaces_bar = create_registration_bar_chart(spaces_results[2], "Spaces") | |
| # Create the Models charts | |
| models_pie = create_pie_chart(models_results[2], 3000, "Models") | |
| models_bar = create_registration_bar_chart(models_results[2], "Models") | |
| return [ | |
| spaces_results[0], spaces_results[1], spaces_results[2], | |
| spaces_pie, spaces_bar, | |
| models_results[0], models_results[1], models_results[2], | |
| models_pie, models_bar | |
| ] | |
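| # UI layout: a themed Blocks app with a refresh button and two tabs ("Spaces Trending" and | |
| # "Models Trending"), each containing a rank plot, a pie/bar chart row, an HTML card grid, | |
| # and a data table. | |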
| with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css=""" | |
| #spaces_pie, #models_pie { | |
| min-height: 400px; | |
| border-radius: 10px; | |
| box-shadow: 0 2px 4px rgba(0,0,0,0.1); | |
| } | |
| #spaces_bar, #models_bar { | |
| min-height: 400px; | |
| border-radius: 10px; | |
| box-shadow: 0 2px 4px rgba(0,0,0,0.1); | |
| } | |
| """) as demo: | |
| gr.Markdown(""" | |
| # 🤗 Hugging Face 'Korea (Language) Leaderboard' | |
| Analyzes only the 'Korean (company/language)' entries, based on the real-time popularity rankings of the Spaces and Models provided by Hugging Face (public listings, search, leaderboards, etc.). (c) 'Korea AI Association' / Requests: [email protected] | |
| """) | |
| # Add the news image and caption | |
| gr.Markdown(""" | |
| ### [Hot NEWS] Two Korean entries from 'ginipick', 'FLUXllama' and 'Text3D', were selected for Hugging Face's December 'TOP 12' | |
| """) | |
| gr.Image("HF-TOP12.png", show_label=False) | |
| # Refresh button | |
| refresh_btn = gr.Button("Refresh", variant="primary") | |
| with gr.Tab("Spaces Trending"): | |
| trending_plot = gr.Plot() | |
| with gr.Row(): | |
| # Containers for the pie chart and the bar chart | |
| with gr.Column(scale=1): | |
| spaces_pie_chart = gr.Plot( | |
| label="Korean Spaces Distribution", | |
| elem_id="spaces_pie" | |
| ) | |
| with gr.Column(scale=2): | |
| spaces_bar_chart = gr.Plot( | |
| label="Registrations by Creator", | |
| elem_id="spaces_bar" | |
| ) | |
| trending_info = gr.HTML() | |
| trending_df = gr.DataFrame( | |
| headers=["Rank", "Space ID", "Title", "Likes", "URL"], | |
| datatype=["number", "str", "str", "number", "str"], | |
| row_count=(10, "dynamic") | |
| ) | |
| with gr.Tab("Models Trending"): | |
| models_plot = gr.Plot() | |
| with gr.Row(): | |
| # Containers for the pie chart and the bar chart | |
| with gr.Column(scale=1): | |
| models_pie_chart = gr.Plot( | |
| label="Korean Models Distribution", | |
| elem_id="models_pie" | |
| ) | |
| with gr.Column(scale=2): | |
| models_bar_chart = gr.Plot( | |
| label="Registrations by Creator", | |
| elem_id="models_bar" | |
| ) | |
| models_info = gr.HTML() | |
| models_df = gr.DataFrame( | |
| headers=["Global Rank", "Model ID", "Title", "Downloads", "Likes", "Korea Search", "URL"], | |
| datatype=["str", "str", "str", "str", "str", "str", "str"], | |
| row_count=(10, "dynamic") | |
| ) | |
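| # This redefinition of refresh_all_data (wrapped in try/except) shadows the module-level | |
| # version above; it is the function wired to refresh_btn and used for the initial load. | |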
| def refresh_all_data(): | |
| try: | |
| spaces_results = get_spaces_data("trending") | |
| models_results = get_models_data() | |
| # Create the Spaces charts | |
| spaces_pie = create_pie_chart(spaces_results[2], 500, "Spaces") | |
| spaces_bar = create_registration_bar_chart(spaces_results[2], "Spaces") | |
| # Create the Models charts | |
| models_pie = create_pie_chart(models_results[2], 3000, "Models") | |
| models_bar = create_registration_bar_chart(models_results[2], "Models") | |
| return [ | |
| spaces_results[0], spaces_results[1], spaces_results[2], | |
| spaces_pie, spaces_bar, | |
| models_results[0], models_results[1], models_results[2], | |
| models_pie, models_bar | |
| ] | |
| except Exception as e: | |
| print(f"Error in refresh_all_data: {str(e)}") | |
| # Return default values when an error occurs | |
| return [None] * 10 | |
| # Click handler for the refresh button | |
| refresh_btn.click( | |
| fn=refresh_all_data, | |
| outputs=[ | |
| trending_plot, trending_info, trending_df, | |
| spaces_pie_chart, spaces_bar_chart, | |
| models_plot, models_info, models_df, | |
| models_pie_chart, models_bar_chart | |
| ] | |
| ) | |
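| # A minimal alternative sketch (commented out, not part of the original app): the initial | |
| # population below could instead be done with a load event, e.g. | |
| #     demo.load(fn=refresh_all_data, outputs=[trending_plot, trending_info, trending_df, | |
| #               spaces_pie_chart, spaces_bar_chart, models_plot, models_info, models_df, | |
| #               models_pie_chart, models_bar_chart]) | |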
| # Initial data load | |
| try: | |
| initial_data = refresh_all_data() | |
| # Set initial component values | |
| trending_plot.value = initial_data[0] | |
| trending_info.value = initial_data[1] | |
| trending_df.value = initial_data[2] | |
| spaces_pie_chart.value = initial_data[3] | |
| spaces_bar_chart.value = initial_data[4] | |
| models_plot.value = initial_data[5] | |
| models_info.value = initial_data[6] | |
| models_df.value = initial_data[7] | |
| models_pie_chart.value = initial_data[8] | |
| models_bar_chart.value = initial_data[9] | |
| except Exception as e: | |
| print(f"Error loading initial data: {str(e)}") | |
| gr.Warning("An error occurred while loading the initial data.") | |
| # Launch the Gradio app | |
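| # Binding to 0.0.0.0:7860 matches the default address expected by Hugging Face Spaces containers. | |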
| demo.launch( | |
| server_name="0.0.0.0", | |
| server_port=7860, | |
| share=False, | |
| show_error=True | |
| ) |