Spaces: Running on CPU Upgrade

Commit 1253c3b · Parent(s): 8df1d5c

TRUST_REMOTE_CODE env variable

Files changed:
- src/display/about.py +9 -4
- src/envs.py +2 -0
- src/submission/check_validity.py +2 -2
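
Taken together, the changes thread a single environment variable through the app: src/envs.py reads it, src/display/about.py uses it to choose the FAQ wording, and src/submission/check_validity.py uses it as the default for `trust_remote_code`. A minimal sketch of enabling the flag locally, assuming it is set before `src.envs` is first imported (on a Space it would be set as a Space variable; this snippet is illustrative, not part of the commit):

```python
import os

# Assumed local usage (not part of the commit): the flag must be in the
# environment before src.envs is imported, because envs.py reads it once
# at import time.
os.environ["TRUST_REMOTE_CODE"] = "1"

from src.envs import TRUST_REMOTE_CODE
print(TRUST_REMOTE_CODE)  # True for any non-empty value
```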
src/display/about.py
CHANGED

@@ -1,6 +1,6 @@
 from src.display.utils import ModelType
 from src.display.utils import Tasks
-from src.envs import REPO_ID, QUEUE_REPO, RESULTS_REPO, PATH_TO_COLLECTION, LEADERBOARD_NAME
+from src.envs import REPO_ID, QUEUE_REPO, RESULTS_REPO, PATH_TO_COLLECTION, LEADERBOARD_NAME, TRUST_REMOTE_CODE
 
 LM_EVAL_URL = "https://github.com/eduagarcia/lm-evaluation-harness-pt"
 
@@ -81,6 +81,10 @@ To get more information about quantization, see:
 - [Collection of best models](https://huggingface.co/collections/{PATH_TO_COLLECTION})
 """
 
+REMOTE_CODE_EXAPLANATION = f"- *Yes.*"
+if not TRUST_REMOTE_CODE:
+    REMOTE_CODE_EXAPLANATION = f"- *We only support models that have been integrated in a stable version of the `transformers` library for automatic submission, as we don't want to run possibly unsafe code on our cluster.*"
+
 FAQ_TEXT = f"""
 ---------------------------
 # FAQ
@@ -88,7 +92,7 @@ Below are some common questions - if this FAQ does not answer you, feel free to
 
 ## 1) Submitting a model
 My model requires `trust_remote_code=True`, can I submit it?
-
+{REMOTE_CODE_EXAPLANATION}
 
 What about models of type X?
 - *We only support models that have been integrated in a stable version of the `transformers` library for automatic submission.*
@@ -132,6 +136,7 @@ I have an issue about accessing the leaderboard through the Gradio API
 - *Since this is not the recommended way to access the leaderboard, we won't provide support for this, but you can look at tools provided by the community for inspiration!*
 """
 
+REMOTE_CODE_NOTE = "Note: if your model needs `trust_remote_code=True`, we do not support this option yet but we are working on adding it, stay posted!"
 
 EVALUATION_QUEUE_TEXT = f"""
 # Evaluation Queue for the 🚀 {LEADERBOARD_NAME}
@@ -149,8 +154,8 @@ tokenizer = AutoTokenizer.from_pretrained("your model name", revision=revision)
 ```
 If this step fails, follow the error messages to debug your model before submitting it. It's likely your model has been improperly uploaded.
 
-Note: make sure your model is public!
-
+Note: make sure your model is public!
+{REMOTE_CODE_NOTE if not TRUST_REMOTE_CODE else ""}
 
 ### 2) Convert your model weights to [safetensors](https://huggingface.co/docs/safetensors/index)
 It's a new format for storing weights which is safer and faster to load and use. It will also allow us to add the number of parameters of your model to the `Extended Viewer`!
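A small self-contained sketch of the pattern above, with the leaderboard-specific strings trimmed: because `FAQ_TEXT` and `EVALUATION_QUEUE_TEXT` are f-strings evaluated at import time, the chosen answer is baked in when the module loads, so flipping the env var requires restarting the Space. The flag value below is hard-coded purely for illustration:

```python
# Minimal sketch of the pattern introduced in src/display/about.py.
# In the Space, TRUST_REMOTE_CODE is imported from src.envs.
TRUST_REMOTE_CODE = False  # hard-coded here only for illustration

REMOTE_CODE_EXAPLANATION = "- *Yes.*"
if not TRUST_REMOTE_CODE:
    REMOTE_CODE_EXAPLANATION = (
        "- *We only support models that have been integrated in a stable "
        "version of the `transformers` library for automatic submission, "
        "as we don't want to run possibly unsafe code on our cluster.*"
    )

# The answer is interpolated once, when this module is imported.
FAQ_TEXT = f"""
## 1) Submitting a model
My model requires `trust_remote_code=True`, can I submit it?
{REMOTE_CODE_EXAPLANATION}
"""

print(FAQ_TEXT)  # prints the restrictive answer unless the flag is enabled
```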
src/envs.py
CHANGED

@@ -36,4 +36,6 @@ RATE_LIMIT_PERIOD = int(os.getenv("RATE_LIMIT_PERIOD", 7))
 RATE_LIMIT_QUOTA = int(os.getenv("RATE_LIMIT_QUOTA", 5))
 HAS_HIGHER_RATE_LIMIT = os.environ.get("HAS_HIGHER_RATE_LIMIT", "TheBloke").split(',')
 
+TRUST_REMOTE_CODE = bool(os.getenv("TRUST_REMOTE_CODE", False))
+
 API = HfApi(token=H4_TOKEN)
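One detail of the parsing above: `bool()` of any non-empty string is `True`, so setting `TRUST_REMOTE_CODE=0` or `TRUST_REMOTE_CODE=false` would still enable the flag. The sketch below shows the behaviour as committed, plus a stricter alternative; the `_STRICT` variant is purely illustrative and not part of this commit:

```python
import os

# As committed: bool() of a non-empty string is always True, so any set,
# non-empty value enables the flag; only an unset or empty variable
# yields False.
TRUST_REMOTE_CODE = bool(os.getenv("TRUST_REMOTE_CODE", False))

# Hypothetical stricter parse (illustrative only): accept an explicit
# whitelist of truthy spellings.
TRUST_REMOTE_CODE_STRICT = os.getenv("TRUST_REMOTE_CODE", "").lower() in ("1", "true", "yes")
```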
src/submission/check_validity.py
CHANGED

@@ -9,7 +9,7 @@ from huggingface_hub import ModelCard
 from huggingface_hub.hf_api import ModelInfo, get_safetensors_metadata
 from transformers import AutoConfig, AutoTokenizer
 
-from src.envs import HAS_HIGHER_RATE_LIMIT
+from src.envs import HAS_HIGHER_RATE_LIMIT, TRUST_REMOTE_CODE
 
 
 # ht to @Wauplin, thank you for the snippet!
@@ -36,7 +36,7 @@ def check_model_card(repo_id: str) -> tuple[bool, str]:
     return True, ""
 
 
-def is_model_on_hub(model_name: str, revision: str, token: str = None, trust_remote_code=
+def is_model_on_hub(model_name: str, revision: str, token: str = None, trust_remote_code=TRUST_REMOTE_CODE, test_tokenizer=False) -> tuple[bool, str, AutoConfig]:
     try:
         config = AutoConfig.from_pretrained(model_name, revision=revision, trust_remote_code=trust_remote_code, token=token) #, force_download=True)
         if test_tokenizer: