Update app.py
app.py CHANGED
@@ -1,5 +1,5 @@
 from transformers import AutoModelForCausalLM, AutoTokenizer
-
+import streamlit as st
 device = "cuda" # the device to load the model onto
 
 model = AutoModelForCausalLM.from_pretrained("mistralai/Mistral-7B-Instruct-v0.1")
@@ -18,4 +18,4 @@ model.to(device)
 
 generated_ids = model.generate(model_inputs, max_new_tokens=1000, do_sample=True)
 decoded = tokenizer.batch_decode(generated_ids)
-
+st.write(decoded[0])
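The hunks above omit the middle of app.py (lines 6-17), so only a sketch of the full script after this commit can be given. The version below assumes the file follows the standard Mistral-7B-Instruct-v0.1 chat-template example from the model card; the tokenizer load, the messages prompt, and the apply_chat_template call are assumptions and are not visible in the diff.

from transformers import AutoModelForCausalLM, AutoTokenizer
import streamlit as st

device = "cuda"  # the device to load the model onto

model = AutoModelForCausalLM.from_pretrained("mistralai/Mistral-7B-Instruct-v0.1")
tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.1")

# Assumed prompt; the actual messages used in app.py are not shown in the diff.
messages = [
    {"role": "user", "content": "What is your favourite condiment?"},
]

# Format the chat messages into instruction-tuned input ids.
encodeds = tokenizer.apply_chat_template(messages, return_tensors="pt")

model_inputs = encodeds.to(device)
model.to(device)

generated_ids = model.generate(model_inputs, max_new_tokens=1000, do_sample=True)
decoded = tokenizer.batch_decode(generated_ids)
st.write(decoded[0])  # the line added in this commit: show the output in the Space

Note that Streamlit reruns the whole script on every interaction, so this layout reloads the 7B model each time; wrapping the from_pretrained calls in a function decorated with @st.cache_resource is a common refinement, though it is not part of this commit.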