Commit e51bc2f
Parent(s): 1aa8621
use pipelines in generator
generator.py CHANGED  (+7 -6)
@@ -43,7 +43,7 @@ def load_model():
     return hfm, hft, tok, model
 
 hfmodel, hftokenizer, tok, model = load_model()
-
+nlp = pipeline("e2e-qg")
 def run_model(input_string, **generator_args):
     generator_args = {
         "max_length": 256,
@@ -53,11 +53,12 @@ def run_model(input_string, **generator_args):
         "early_stopping": False,
     }
     # tokenizer = att.from_pretrained("ThomasSimonini/t5-end2end-question-generation")
-    input_string = "generate questions: " + input_string + " </s>"
-    input_ids = hftokenizer.encode(input_string, return_tensors="pt")
-    res = hfmodel.generate(input_ids, **generator_args)
-    output = hftokenizer.batch_decode(res, skip_special_tokens=True)
-    output = [item.split("<sep>") for item in output]
+    output = nlp(input_string)
+    # input_string = "generate questions: " + input_string + " </s>"
+    # input_ids = hftokenizer.encode(input_string, return_tensors="pt")
+    # res = hfmodel.generate(input_ids, **generator_args)
+    # output = hftokenizer.batch_decode(res, skip_special_tokens=True)
+    # output = [item.split("<sep>") for item in output]
     return output
 
 
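For context, a minimal sketch of how the new pipeline path would be exercised. Note that "e2e-qg" is not a built-in transformers task; the `pipeline` factory used here presumably comes from a project-local module (in the style of the question_generation project's pipelines.py), so the import below and the sample passage are assumptions for illustration, not part of this commit.

# Sketch only: assumes `pipeline` is a project-local factory (question_generation
# style), not transformers.pipeline, since "e2e-qg" is not a stock task name.
from pipelines import pipeline

nlp = pipeline("e2e-qg")  # end-to-end question generation pipeline

# Hypothetical sample passage; the pipeline returns a list of generated questions.
passage = (
    "Python is a programming language created by Guido van Rossum "
    "and first released in 1991."
)
questions = nlp(passage)
print(questions)  # e.g. ["Who created Python?", "When was Python first released?"]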