metastable-void committed · Commit 4a30925 · Parent(s): 6371c0b

fix
app.py CHANGED
@@ -32,7 +32,7 @@ if torch.cuda.is_available():
 @torch.inference_mode()
 def generate(
     message: str,
-    chat_history
+    chat_history,
     max_new_tokens: int = 1024,
     temperature: float = 0.7,
     top_p: float = 0.95,
@@ -56,7 +56,7 @@ def generate(
         repetition_penalty=repetition_penalty,
         top_k=top_k,
         top_p=top_p,
-    )
+    )[-1]["generated_text"]
     print(output)
     gen_text = output[len(user_input):]
     gen_text = gen_text[:gen_text.find("\n")] if "\n" in gen_text else gen_text
@@ -65,7 +65,7 @@ def generate(
 
 demo = gr.ChatInterface(
     fn=generate,
-    type="
+    type="messages",
     additional_inputs_accordion=gr.Accordion(label="詳細設定", open=False),
     additional_inputs=[
         gr.Slider(
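
For context, a minimal runnable sketch of how the patched pieces fit together. The pipeline object `pipe`, the placeholder model name, the `user_input` prompt handling, and the slider arguments are assumptions inferred from the diff, not shown in this commit:

# Minimal sketch of the patched app.py, assuming a transformers
# text-generation pipeline; `pipe`, the model name, and the prompt
# handling are inferred from the diff, not taken from the commit.
import gradio as gr
import torch
from transformers import pipeline

# Placeholder checkpoint; the actual model is not visible in this diff.
pipe = pipeline(
    "text-generation",
    model="gpt2",
    device=0 if torch.cuda.is_available() else -1,
)

@torch.inference_mode()
def generate(
    message: str,
    chat_history,            # the fix adds the missing comma here
    max_new_tokens: int = 1024,
    temperature: float = 0.7,
    top_p: float = 0.95,
    top_k: int = 50,
    repetition_penalty: float = 1.0,
):
    user_input = message  # assumed; the real prompt construction is outside the hunk
    # pipeline(...) returns a list of {"generated_text": ...} dicts; the fix
    # indexes the last result and keeps only the generated string.
    output = pipe(
        user_input,
        max_new_tokens=max_new_tokens,
        temperature=temperature,
        repetition_penalty=repetition_penalty,
        top_k=top_k,
        top_p=top_p,
    )[-1]["generated_text"]
    print(output)
    gen_text = output[len(user_input):]
    gen_text = gen_text[:gen_text.find("\n")] if "\n" in gen_text else gen_text
    return gen_text

demo = gr.ChatInterface(
    fn=generate,
    type="messages",  # history arrives as {"role": ..., "content": ...} dicts
    additional_inputs_accordion=gr.Accordion(label="詳細設定", open=False),
    additional_inputs=[
        # Slider arguments are placeholders; the real ones sit below the hunk.
        gr.Slider(minimum=1, maximum=2048, step=1, value=1024,
                  label="max_new_tokens"),
    ],
)

if __name__ == "__main__":
    demo.launch()

With type="messages", gr.ChatInterface passes chat_history as a list of role/content dicts rather than the older tuple pairs, which matches the current ChatInterface API and avoids the deprecation path for tuple-style histories.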