Update README.md
README.md

````diff
@@ -62,7 +62,7 @@ Typically, you can load the models using the Hugging Face `transformers` library
 ```python
 from transformers import AutoModelForCausalLM, AutoTokenizer
 # Example for one of the models (replace with the specific model name)
-model_name = "ulab-ai/Time-R1-
+model_name = "ulab-ai/Time-R1-Theta1_prime"  # Or your specific Hugging Face model path
 tokenizer = AutoTokenizer.from_pretrained(model_name)
 model = AutoModelForCausalLM.from_pretrained(model_name)
 # Further usage instructions would go here or in the repository
````
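For readers following along, here is a minimal usage sketch that continues from the updated snippet. The prompt text and generation settings are illustrative assumptions and are not part of the README; only the standard `transformers` loading and `generate` API is used.

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "ulab-ai/Time-R1-Theta1_prime"  # model path from the updated README line

# Load tokenizer and model exactly as in the README snippet
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# Illustrative prompt only -- the actual prompt format expected by the model is an assumption here
prompt = "In what year did the event described below most likely occur?\n..."
inputs = tokenizer(prompt, return_tensors="pt")

# Greedy generation with a modest token budget; these settings are placeholders
outputs = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```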