hf_hub_api_demo / upload_tokenizer.py
louiecerv's picture
sync with remote
fa189e6
raw
history blame contribute delete
240 Bytes
"""Upload the DistilBERT tokenizer to the Hugging Face Hub.

Loads the pretrained tokenizer for ``distilbert-base-uncased`` and pushes
it to the ``louiecerv/sentiment_analysis_model`` repository.

NOTE(review): pushing presumably requires Hugging Face authentication
(e.g. ``huggingface-cli login``) — confirm the execution environment.
"""
from transformers import AutoTokenizer

# Pretrained checkpoint whose tokenizer is re-published to the Hub.
model_checkpoint = "distilbert-base-uncased"


def main() -> None:
    """Load the tokenizer and push it to the model repo on the Hub."""
    tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)
    # Push tokenizer to the model repo (performs a network upload).
    tokenizer.push_to_hub("louiecerv/sentiment_analysis_model")


if __name__ == "__main__":
    # Guard so importing this module does not trigger a network upload.
    main()