from transformers import AutoModelForCausalLM, AutoTokenizer
from huggingface_hub import HfApi

api = HfApi()

# Specify the model name and the directory where you want to save it
MODEL_NAME = "TinyLlama/TinyLlama-1.1B-Chat-v1.0"  # Example model name
save_directory = "./tinyllama"  # Specify your folder path here

# Download the model and tokenizer
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)

# Save the model and tokenizer to the specified directory
model.save_pretrained(save_directory)
tokenizer.save_pretrained(save_directory)

# Upload the saved files to the target repo on the Hub
repo_id = "medmekk/BitNet.cpp"

api.upload_folder(
    folder_path=save_directory,
    repo_id=repo_id,
    repo_type="space",
)
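
# Optional check (a minimal sketch, reusing the repo_id/repo_type defined above):
# list the files now present in the repo to confirm the upload went through.
# `list_repo_files` is part of the huggingface_hub HfApi.
uploaded_files = api.list_repo_files(repo_id=repo_id, repo_type="space")
print(uploaded_files)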