update model name in examples
README.md CHANGED

@@ -149,7 +149,7 @@ Use the code below to get started with the model. You can run conversational inf
 import transformers
 import torch
 
-model_id = "HPAI-BSC/
+model_id = "HPAI-BSC/Llama3.1-Aloe-Beta-70B"
 
 pipeline = transformers.pipeline(
     "text-generation",
@@ -191,7 +191,7 @@ print(outputs[0]["generated_text"][len(prompt):])
 from transformers import AutoTokenizer, AutoModelForCausalLM
 import torch
 
-model_id = "HPAI-BSC/
+model_id = "HPAI-BSC/Llama3.1-Aloe-Beta-70B"
 
 tokenizer = AutoTokenizer.from_pretrained(model_id)
 model = AutoModelForCausalLM.from_pretrained(
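
The two hunks only show a few context lines around the renamed model id, so the README's full examples are cut off here. For orientation, below is a minimal sketch of what the updated pipeline example could look like end to end with the new model id; the chat messages, dtype, and generation parameters are assumptions for illustration, not values taken from this commit.

import transformers
import torch

model_id = "HPAI-BSC/Llama3.1-Aloe-Beta-70B"

pipeline = transformers.pipeline(
    "text-generation",
    model=model_id,
    model_kwargs={"torch_dtype": torch.bfloat16},  # assumed dtype, not shown in the hunk
    device_map="auto",
)

# Example chat prompt (assumed for illustration; the README's actual prompt is not in the diff)
messages = [
    {"role": "system", "content": "You are a helpful medical assistant."},
    {"role": "user", "content": "What are the common symptoms of anemia?"},
]

prompt = pipeline.tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
outputs = pipeline(prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_p=0.9)
print(outputs[0]["generated_text"][len(prompt):])

The second hunk switches to the lower-level AutoTokenizer / AutoModelForCausalLM path but is cut off at the from_pretrained call. A comparable sketch, again with assumed dtype, device placement, and prompt:

from transformers import AutoTokenizer, AutoModelForCausalLM
import torch

model_id = "HPAI-BSC/Llama3.1-Aloe-Beta-70B"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,  # assumed; the README's actual kwargs are not visible in the hunk
    device_map="auto",
)

# Assumed example prompt
messages = [{"role": "user", "content": "What are the common symptoms of anemia?"}]
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

outputs = model.generate(input_ids, max_new_tokens=256)
print(tokenizer.decode(outputs[0][input_ids.shape[-1]:], skip_special_tokens=True))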