Update app.py
app.py
CHANGED
@@ -188,20 +188,20 @@ def improve_summary_generation(text, model, tokenizer):
     # Validate the summary
     if not validate_summary(processed_summary, text):
         # Retry with alternate generation parameters
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        with torch.no_grad():
+            summary_ids = model.generate(
+                input_ids=inputs["input_ids"],
+                attention_mask=inputs["attention_mask"],
+                max_length=250,
+                min_length=50,
+                num_beams=4,
+                length_penalty=2.0,
+                no_repeat_ngram_size=4,
+                temperature=0.8,
+                repetition_penalty=1.5,
+            )
+        summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
+        processed_summary = post_process_summary(summary)
 
 
     return processed_summary
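For context, a minimal sketch of how this retry-on-failed-validation branch might sit inside improve_summary_generation, assuming a Hugging Face seq2seq model and tokenizer. The pre-tokenized inputs dict, the first-pass generation settings, the validate_summary / post_process_summary stubs, and the checkpoint name are assumptions based on the names visible in the diff, not the Space's actual code.

import torch
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer


def post_process_summary(summary):
    # Hypothetical stand-in for the Space's real post-processing step.
    return summary.strip()


def validate_summary(summary, source_text):
    # Hypothetical stand-in for the Space's real validation check:
    # here, just require a minimally long summary that is shorter than the source.
    return len(summary.split()) >= 30 and len(summary) < len(source_text)


def improve_summary_generation(text, model, tokenizer):
    # Tokenize the source text; the truncation length is an assumption.
    inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=1024)

    # First pass with plain beam-search settings (assumed).
    with torch.no_grad():
        first_ids = model.generate(
            input_ids=inputs["input_ids"],
            attention_mask=inputs["attention_mask"],
            max_length=250,
            num_beams=4,
        )
    processed_summary = post_process_summary(
        tokenizer.decode(first_ids[0], skip_special_tokens=True)
    )

    # Validate the summary
    if not validate_summary(processed_summary, text):
        # Retry with alternate generation parameters (values taken from the diff above).
        with torch.no_grad():
            summary_ids = model.generate(
                input_ids=inputs["input_ids"],
                attention_mask=inputs["attention_mask"],
                max_length=250,
                min_length=50,
                num_beams=4,
                length_penalty=2.0,
                no_repeat_ngram_size=4,
                temperature=0.8,
                repetition_penalty=1.5,
            )
        summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
        processed_summary = post_process_summary(summary)

    return processed_summary


if __name__ == "__main__":
    # Example usage with a small pretrained summarizer; the checkpoint name
    # is illustrative, not necessarily the one this Space loads.
    tokenizer = AutoTokenizer.from_pretrained("sshleifer/distilbart-cnn-6-6")
    model = AutoModelForSeq2SeqLM.from_pretrained("sshleifer/distilbart-cnn-6-6")
    print(improve_summary_generation("Some long article text ...", model, tokenizer))

One note on the retry parameters: temperature only affects sampling, so with pure beam search (do_sample left at its default of False) recent transformers versions effectively ignore it and may emit a warning; the beam count, length_penalty, no_repeat_ngram_size, and repetition_penalty are what actually change the retry behavior here.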