# Load model directly
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("ibm-granite/granite-3b-code-instruct")
model = AutoModelForCausalLM.from_pretrained("ibm-granite/granite-3b-code-instruct")
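
# --- Usage sketch (not part of the original snippet; illustrative only) ---
# A minimal sketch of running the loaded model: the prompt and max_new_tokens
# value are arbitrary example choices, not values taken from the model card.
inputs = tokenizer("def fibonacci(n):", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))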