Lautaro Cardarelli committed
Commit 77f3032 · 1 Parent(s): 2b32104
fix model
app.py CHANGED
@@ -6,7 +6,7 @@ tokenizer = BartTokenizer.from_pretrained('facebook/bart-large-cnn')
 model = BartForConditionalGeneration.from_pretrained('facebook/bart-large-cnn')
 
 
-def generate_summary(text, model, tokenizer):
+def generate_summary(text):
     inputs = tokenizer.encode("summarize: " + text, return_tensors="pt", max_length=1024, truncation=True)
     summary_ids = model.generate(inputs, max_length=150, min_length=50, length_penalty=2.0, num_beams=4, early_stopping=True)
     summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
@@ -14,7 +14,7 @@ def generate_summary(text, model, tokenizer):
 
 
 def process(text):
-    return generate_summary(text, model, tokenizer)
+    return generate_summary(text)
 
 
 textbox = gr.Textbox(label="Pega el text aca:", placeholder="Texto...", lines=15)
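For reference, a minimal sketch of how app.py plausibly reads after this commit. Only the lines visible in the diff are confirmed; the imports, the `return summary` statement (line 13, not shown in either hunk), and the gr.Interface wiring after the textbox are assumptions added to make the sketch self-contained.

# Sketch of app.py after commit 77f3032.
# Assumed (not shown in the diff): the imports, `return summary`, and the
# Gradio Interface wiring at the bottom.
import gradio as gr
from transformers import BartTokenizer, BartForConditionalGeneration

tokenizer = BartTokenizer.from_pretrained('facebook/bart-large-cnn')
model = BartForConditionalGeneration.from_pretrained('facebook/bart-large-cnn')


def generate_summary(text):
    # Encode the input with the "summarize: " prefix, truncated to BART's 1024-token limit.
    inputs = tokenizer.encode("summarize: " + text, return_tensors="pt", max_length=1024, truncation=True)
    # Beam-search generation bounded to 50-150 output tokens.
    summary_ids = model.generate(inputs, max_length=150, min_length=50, length_penalty=2.0, num_beams=4, early_stopping=True)
    summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
    return summary  # assumed: this line falls between the two hunks


def process(text):
    return generate_summary(text)


textbox = gr.Textbox(label="Pega el text aca:", placeholder="Texto...", lines=15)

# Assumed wiring: expose process() through a simple Gradio interface.
demo = gr.Interface(fn=process, inputs=textbox, outputs=gr.Textbox(label="Summary"))

if __name__ == "__main__":
    demo.launch()

The commit itself only drops the unused model and tokenizer parameters from generate_summary so that it reads the module-level objects directly, and updates the call in process to match.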