Add Llama-68M-Chat-v1 model
app.py CHANGED
@@ -35,7 +35,13 @@ def generate(
 
     return outputs[0]["generated_text"]
 
-model_choices = [
+model_choices = [
+    "Felladrin/Llama-160M-Chat-v1",
+    "Felladrin/Smol-Llama-101M-Chat-v1",
+    "Felladrin/TinyMistral-248M-SFT-v4",
+    "Felladrin/Pythia-31M-Chat-v1",
+    "Felladrin/Llama-68M-Chat-v1"
+]
 
 g = gr.Interface(
     fn=generate,
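The diff only shows where model_choices is defined; the rest of app.py is not visible here. Below is a minimal, hypothetical sketch of how such a list is commonly wired into a Gradio interface as a model selector. The Textbox/Dropdown inputs, the generate signature, and the placeholder body are assumptions for illustration, not the Space's actual code.

import gradio as gr

model_choices = [
    "Felladrin/Llama-160M-Chat-v1",
    "Felladrin/Smol-Llama-101M-Chat-v1",
    "Felladrin/TinyMistral-248M-SFT-v4",
    "Felladrin/Pythia-31M-Chat-v1",
    "Felladrin/Llama-68M-Chat-v1",
]

def generate(prompt, model_name):
    # Placeholder only: the real app presumably runs a text-generation
    # pipeline for the selected model and returns outputs[0]["generated_text"].
    return f"[{model_name}] {prompt}"

g = gr.Interface(
    fn=generate,
    inputs=[
        gr.Textbox(label="Prompt"),
        gr.Dropdown(choices=model_choices, value=model_choices[0], label="Model"),
    ],
    outputs=gr.Textbox(label="Generated text"),
)

g.launch()

With this layout, adding a new model to the Space is a one-line change to model_choices, which is consistent with the small diff in this commit.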