valencar committed on
Commit
2bef81d
·
1 Parent(s): 8ef7c37
Files changed (1) hide show
  1. app.py +5 -2
app.py CHANGED
@@ -29,10 +29,13 @@ prompt = "Qual é o maior planeta do sistema solar ?"
29
  inputs = tokenizer(prompt, return_tensors="pt")
30
 
31
  # Generate
32
- generate_ids = model.generate(inputs.input_ids, max_length=30)
33
  output = tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
34
 
35
- st.write(output)
 
 
 
36
 
37
 
38
 
 
29
  inputs = tokenizer(prompt, return_tensors="pt")
30
 
31
  # Generate
32
+ generate_ids = model.generate(inputs.input_ids, max_length=100)
33
  output = tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
34
 
35
+
36
+
37
+ with st.container():
38
+ st.write('\n\n' + output)
39
 
40
 
41