# NOTE(review): lines below were scraped from a Hugging Face Space page;
# the "Spaces / Sleeping" page-chrome header was removed during cleanup.
import torch
from typing import Tuple

from transformers import AutoTokenizer, AutoModelForCausalLM

# Lazily-initialized module-level singletons; populated on first call to
# get_model_and_tokenizer() and reused afterwards.
tokenizer = None
model = None


def get_model_and_tokenizer() -> Tuple[AutoModelForCausalLM, AutoTokenizer]:
    """Return the shared (model, tokenizer) pair, loading them on first use.

    On the first call, downloads the pretrained tokenizer and causal-LM
    weights from the Hugging Face Hub and caches them in the module-level
    ``model`` / ``tokenizer`` globals; subsequent calls return the cached
    objects without reloading.

    Returns:
        Tuple[AutoModelForCausalLM, AutoTokenizer]: the loaded model
        (moved to CUDA when available, otherwise left on CPU) and its
        tokenizer.
    """
    global model, tokenizer
    if model is None or tokenizer is None:
        # Prefer the GPU when one is present; fall back to CPU.
        device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        # Load the tokenizer and the model from the Hub.
        tokenizer = AutoTokenizer.from_pretrained("juancopi81/lmd_8bars_tokenizer")
        model = AutoModelForCausalLM.from_pretrained(
            "juancopi81/lmd-8bars-2048-epochs40_v4"
        )
        # Move the model to the selected device once, at load time.
        model = model.to(device)
    return model, tokenizer