Update main.py
main.py CHANGED
@@ -13,7 +13,7 @@ def mask_generation(text,model=model_large,tokenizer=tokenizer_large):
     import re
     inputs = ["Mask Generation: " + text+'.']
     inputs = tokenizer(inputs, max_length=512, truncation=True, return_tensors="pt")
-    output = model.generate(**inputs, num_beams=8, do_sample=True, max_length=
+    output = model.generate(**inputs, num_beams=8, do_sample=True, max_length=len(text))
     decoded_output = tokenizer.batch_decode(output, skip_special_tokens=True)[0]
     predicted_title = decoded_output.strip()
     pattern = r'\[.*?\]'
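For context, below is a minimal runnable sketch of the patched function. The checkpoint behind model_large/tokenizer_large is not shown in this commit, so "google/flan-t5-base" is a placeholder assumption, and everything past line 19 of main.py is likewise guessed: returning the bracketed matches is one plausible use of the regex, not the Space's actual code.

import re

from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

# Placeholder checkpoint: the Space's real model_large is not visible in this diff.
tokenizer_large = AutoTokenizer.from_pretrained("google/flan-t5-base")
model_large = AutoModelForSeq2SeqLM.from_pretrained("google/flan-t5-base")

def mask_generation(text, model=model_large, tokenizer=tokenizer_large):
    inputs = ["Mask Generation: " + text + '.']
    inputs = tokenizer(inputs, max_length=512, truncation=True, return_tensors="pt")
    # The patched line: generation length is now capped by the character
    # length of the input text.
    output = model.generate(**inputs, num_beams=8, do_sample=True, max_length=len(text))
    decoded_output = tokenizer.batch_decode(output, skip_special_tokens=True)[0]
    predicted_title = decoded_output.strip()
    pattern = r'\[.*?\]'
    # Hypothetical continuation: the diff hunk ends at the pattern definition,
    # so this return is illustrative only.
    return re.findall(pattern, predicted_title)

One caveat on the fix itself: max_length in model.generate counts tokens, not characters, so len(text) is only a loose character-based cap; max_new_tokens, or a length derived from the tokenized input, would bound the output more precisely.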