Update app.py
app.py CHANGED
@@ -13,8 +13,8 @@ def format_prompt(message, history):
     prompt_prefix = "Please correct the grammar in the following sentence:"
     prompt_template = "[INST] " + prompt_prefix + " {} [/INST]"
 
-
-
+    history.append(("It is my friends house in England.", "It is my friend's house in England."))
+    history.append(("Every girl must bring their books to school.", "Every girl must bring her books to school."))
 
     # Iterates through every past user input and response to be added to the prompt
     for user_prompt, bot_response in history:
@@ -28,14 +28,14 @@ def format_prompt(message, history):
     return prompt
 
 def generate(prompt, history, system_prompt, temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0):
-    print("
+    print("\nSystem Prompt: '{}'".format(system_prompt))
     temperature = float(temperature)
     if temperature < 1e-2:
         temperature = 1e-2
     top_p = float(top_p)
 
     generate_kwargs = dict(temperature=temperature, max_new_tokens=max_new_tokens, top_p=top_p, repetition_penalty=repetition_penalty, do_sample=True, seed=42,)
-
+
     formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
     stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
     output = ""
@@ -46,6 +46,7 @@ def generate(prompt, history, system_prompt, temperature=0.9, max_new_tokens=256
     return output
 
 
+
 additional_inputs=[
     gr.Textbox( label="System Prompt", value="Correct the following sentence to make it grammatically accurate while maintaining the original meaning. Output only the corrected sentence." , max_lines=1, interactive=True, ),
     gr.Slider( label="Temperature", value=0.9, minimum=0.0, maximum=1.0, step=0.05, interactive=True, info="Higher values produce more diverse outputs", ),
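The two history.append(...) calls seed the chat history with worked corrections, so every request reaches the model with two few-shot examples ahead of the user's sentence. Below is a minimal sketch of how format_prompt could fold those (user, bot) pairs into the Mixtral-style [INST] ... [/INST] prompt; only the prefix, the template, and the loop header are visible in this diff, so the concatenation and the final return are assumptions, not the Space's exact code.

# Sketch only: the body between the loop header and "return prompt" is assumed.
def format_prompt(message, history):
    prompt_prefix = "Please correct the grammar in the following sentence:"
    prompt_template = "[INST] " + prompt_prefix + " {} [/INST]"

    # Few-shot examples added by this commit, stored as (user, bot) pairs.
    history.append(("It is my friends house in England.", "It is my friend's house in England."))
    history.append(("Every girl must bring their books to school.", "Every girl must bring her books to school."))

    prompt = ""
    # Iterates through every past user input and response to be added to the prompt
    for user_prompt, bot_response in history:
        prompt += prompt_template.format(user_prompt) + " " + bot_response + " "

    # Finally, ask for the new sentence to be corrected.
    prompt += prompt_template.format(message)
    return prompt

# Example: even with an empty history the prompt carries both few-shot pairs.
print(format_prompt("She go to school every day.", history=[]))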
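generate() builds the sampling kwargs, formats the prompt, and then consumes a token stream from client.text_generation(...) with stream=True, details=True, and return_full_text=False; the loop that presumably appends each response.token.text to output sits in lines 42-45, outside the hunks shown here. The sketch below illustrates that streaming pattern with huggingface_hub.InferenceClient; the model id and the helper name stream_correction are assumptions, since the client's construction is not part of this diff.

# Sketch only: the model id and helper name are assumptions; the diff shows just
# the text_generation call and its kwargs, not how `client` is constructed.
from huggingface_hub import InferenceClient

client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")  # assumed model id

def stream_correction(formatted_prompt):
    # Same sampling settings as generate()'s defaults.
    generate_kwargs = dict(
        temperature=0.9,
        max_new_tokens=256,
        top_p=0.95,
        repetition_penalty=1.0,
        do_sample=True,
        seed=42,
    )
    # With stream=True and details=True the client yields one event per generated
    # token; the new text is carried in response.token.text.
    stream = client.text_generation(
        formatted_prompt, **generate_kwargs,
        stream=True, details=True, return_full_text=False,
    )
    output = ""
    for response in stream:
        output += response.token.text
        yield output  # growing partial result, suitable for a streaming Gradio chat

# Example: stream a correction for one sentence through the prompt builder above.
for partial in stream_correction(format_prompt("She go to school every day.", history=[])):
    print(partial)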