Update README.md
README.md
CHANGED
@@ -51,7 +51,7 @@ inputs = tokenizer(
 
 # Generate the sequences
 with torch.inference_mode():
-    outputs = model.generate(**inputs, max_new_tokens=32)
+    outputs = model.generate(**inputs, max_new_tokens=32, temperature=0.00001, top_k=1)
 
 # Decode the generated sequences
 decoded_sequences = tokenizer.batch_decode(outputs, skip_special_tokens=True)
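In effect, this change pins generation to near-greedy decoding: with top_k=1 and a temperature close to zero, the most likely token is chosen at every step. A minimal sketch of an equivalent setup is below; the checkpoint name and prompt are placeholders (not the ones from the README), and do_sample=False is used to request greedy decoding directly instead of approximating it with sampling parameters.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Placeholder checkpoint; substitute the model the README actually uses.
model_name = "gpt2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# Placeholder prompt, tokenized to PyTorch tensors.
inputs = tokenizer("The quick brown fox", return_tensors="pt")

# Generate the sequences
with torch.inference_mode():
    # Greedy decoding: deterministic, matching the effect of
    # temperature -> 0 with top_k=1 in the diff above.
    outputs = model.generate(**inputs, max_new_tokens=32, do_sample=False)

# Decode the generated sequences
decoded_sequences = tokenizer.batch_decode(outputs, skip_special_tokens=True)
print(decoded_sequences)
```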