Update main.py
main.py
CHANGED
@@ -22,9 +22,11 @@ def generate_humanized_content(content: str) -> str:
     Rewrite:
     """
     inputs = tokenizer(prompt, return_tensors="pt", padding=True, truncation=True)
+    print("inputs:", inputs)  # print the model inputs
     output = model.generate(inputs["input_ids"], max_length=1024, num_beams=1, do_sample=True, temperature=0.7, top_k=50, early_stopping=True)
+    print("output:", output)  # print the model output (1)
     decoded_output = tokenizer.decode(output[0], skip_special_tokens=True)
-    print("Model output:", decoded_output)  # print the model output
+    print("Model output:", decoded_output)  # print the model output (2)
     return decoded_output
 
 # API endpoint: receives content and returns the "humanized" text
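For context, a minimal sketch of how the changed function might fit together after this commit. The checkpoint name (t5-base), the prompt template, and the model/tokenizer setup are assumptions for illustration; only the function body shown in the diff comes from the source.

# Minimal sketch, assuming a Hugging Face seq2seq checkpoint such as t5-base.
# The actual model and the full prompt string are not visible in this diff.
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("t5-base")      # assumed checkpoint
model = AutoModelForSeq2SeqLM.from_pretrained("t5-base")  # assumed checkpoint

def generate_humanized_content(content: str) -> str:
    # Assumed prompt template; the real one is truncated in the diff.
    prompt = f"Rewrite:\n{content}"
    inputs = tokenizer(prompt, return_tensors="pt", padding=True, truncation=True)
    print("inputs:", inputs)  # debug: token ids and attention mask
    output = model.generate(
        inputs["input_ids"],
        max_length=1024,
        num_beams=1,
        do_sample=True,
        temperature=0.7,
        top_k=50,
        early_stopping=True,
    )
    print("output:", output)  # debug: raw generated token ids
    decoded_output = tokenizer.decode(output[0], skip_special_tokens=True)
    print("Model output:", decoded_output)  # debug: decoded text
    return decoded_output

One note on the call as written: generate() receives only inputs["input_ids"]; if requests are ever batched with padding, also passing inputs["attention_mask"] would likely avoid the padding-related warning transformers emits in that case.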