# Load model directly
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("CYFRAGOVPL/PLLuM-12B-chat")
model = AutoModelForCausalLM.from_pretrained("CYFRAGOVPL/PLLuM-12B-chat")
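A hedged sketch of how the loaded checkpoint could be used for a single chat turn, continuing from the tokenizer and model defined above. It assumes the tokenizer ships a chat template for this model and that enough memory is available for the 12B weights; the prompt text and sampling parameters are illustrative only.

# Example chat turn (sketch, not an official usage recipe)
messages = [{"role": "user", "content": "Hello! Please introduce yourself briefly."}]

# Build the prompt with the tokenizer's chat template (assumed to be defined for this checkpoint)
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

# Generate a reply; max_new_tokens and temperature are placeholder values
output = model.generate(input_ids, max_new_tokens=256, do_sample=True, temperature=0.7)

# Decode only the newly generated tokens, skipping the prompt
print(tokenizer.decode(output[0][input_ids.shape[-1]:], skip_special_tokens=True))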