Update main.py
main.py CHANGED
@@ -57,12 +57,12 @@ def LoggaTesto(log_type, data):
 @app.post("/Genera")
 def generate_text(request: Request, input_data: InputData):
     if not input_data.asincrono:
-        LoggaTesto("RICHIESTA SINCRONA", input_data)
         temperature = input_data.temperature
         max_new_tokens = input_data.max_new_tokens
         top_p = input_data.top_p
         repetition_penalty = input_data.repetition_penalty
         input_text = generate_input_text(input_data)
+        LoggaTesto("RICHIESTA SINCRONA", input_text)
         max_new_tokens = min(max_new_tokens, 29500 - len(input_text))
         history = []
         generated_response = generate(input_text, history, temperature, max_new_tokens, top_p, repetition_penalty)
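The commit moves the synchronous-request log from before the parameter extraction to after generate_input_text, so LoggaTesto records the fully assembled prompt (input_text) instead of the raw InputData object. Below is a minimal sketch of the endpoint after this change, assuming a FastAPI app with a Pydantic InputData model; the field defaults and the bodies of LoggaTesto, generate_input_text, and generate are placeholders inferred from the diff, not the Space's actual code.

# Minimal sketch of the endpoint after this commit. Field defaults and helper
# bodies are assumptions; only the names and the call order come from the diff.
from fastapi import FastAPI, Request
from pydantic import BaseModel

app = FastAPI()

class InputData(BaseModel):
    # Fields inferred from the attributes read in the diff (hypothetical defaults).
    input: str = ""
    temperature: float = 0.7
    max_new_tokens: int = 512
    top_p: float = 0.95
    repetition_penalty: float = 1.1
    asincrono: bool = False  # "asynchronous" flag; only the synchronous branch appears in the hunk

def LoggaTesto(log_type: str, data) -> None:
    # Placeholder logger ("log text"); the real implementation is not part of this diff.
    print(f"[{log_type}] {data}")

def generate_input_text(input_data: InputData) -> str:
    # Placeholder prompt builder: assumed to turn the request into the final prompt string.
    return input_data.input

def generate(input_text, history, temperature, max_new_tokens, top_p, repetition_penalty) -> str:
    # Placeholder for the model call made by the Space.
    return "..."

@app.post("/Genera")
def generate_text(request: Request, input_data: InputData):
    if not input_data.asincrono:
        temperature = input_data.temperature
        max_new_tokens = input_data.max_new_tokens
        top_p = input_data.top_p
        repetition_penalty = input_data.repetition_penalty
        input_text = generate_input_text(input_data)
        # Moved by this commit: log the assembled prompt rather than the raw InputData.
        LoggaTesto("RICHIESTA SINCRONA", input_text)
        # Keep prompt length plus generated tokens within the 29500 budget used in the hunk.
        max_new_tokens = min(max_new_tokens, 29500 - len(input_text))
        history = []
        generated_response = generate(input_text, history, temperature, max_new_tokens, top_p, repetition_penalty)
        # Hypothetical response shape; the actual return handling is outside this hunk.
        return {"response": generated_response}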