Update app.py
app.py
CHANGED
@@ -7,7 +7,7 @@ from transformers import pipeline
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
 
-st.title('
+st.title('Booba')
 st.subheader("Commencez la phrase, l'algorithme la termine.")
 st.write("Note : la génération du texte prend ~ 5 minutes")
 
@@ -16,7 +16,7 @@ st.write("Note : la génération du texte prend ~ 5 minutes")
 
 with st.form("my_form"):
 
-    text = st.text_input("Début de la phrase :", '
+    text = st.text_input("Début de la phrase :", "C'était Noël dans la famille")
 
     # Every form must have a submit button.
     submitted = st.form_submit_button("Générer la suite")
@@ -24,7 +24,7 @@ with st.form("my_form"):
     # Load the model ---
     model_checkpoint = "bigscience/bloom-560m"
     tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)
-    model = AutoModelForCausalLM.from_pretrained("dan-vdb/
+    model = AutoModelForCausalLM.from_pretrained("dan-vdb/BoobaAI")
     device = torch.device("cpu")
 
 # device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
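The hunks above end before the generation call itself. A minimal sketch of how the loaded tokenizer and model could produce the continuation on CPU is shown below; the generation parameters (max_new_tokens, do_sample, top_p) are assumptions for illustration and do not come from app.py.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Same checkpoints as in the diff above.
tokenizer = AutoTokenizer.from_pretrained("bigscience/bloom-560m")
model = AutoModelForCausalLM.from_pretrained("dan-vdb/BoobaAI")
device = torch.device("cpu")
model.to(device)

# Tokenize the user prompt and generate a continuation on CPU
# (slow, hence the "~ 5 minutes" note in the app).
text = "C'était Noël dans la famille"
inputs = tokenizer(text, return_tensors="pt").to(device)
output_ids = model.generate(**inputs, max_new_tokens=50, do_sample=True, top_p=0.95)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))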