Hugging Face Spaces — commit: "feat: use larger model"
File changed: app.py
@@ -11,9 +11,9 @@ from transformers import (
 # ▸ For a causal code model (no T5 errors):
 # MODEL_ID = "Salesforce/codegen-350M-multi"
 # ▸ Or, for a seq‑to‑seq model:
-MODEL_ID = "google/flan-t5-base"
+# MODEL_ID = "google/flan-t5-base"
 # MODEL_ID = "google/flan-t5-small"
-
+MODEL_ID = "Salesforce/codegen-350M-multi"

 # ── 2) Load tokenizer + model ────────────────────────────────────────
 tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)