Update app.py
app.py CHANGED
@@ -13,17 +13,19 @@ pipe = pipeline(
     "text-generation",
     model=modelpath
 )
-messages = [
-    {"role": "system", "content": "You are a customer applying for a housing loan in India. Provide dummy details about your application and negotiate the terms."},
-    {"role": "user", "content": "Hi!Welcome to Hero Housing Finance!"},
-    {"role": "assistant", "content": "Hello, I would like to apply for a loan."},
-]
+#messages = [
+#    {"role": "system", "content": "You are a customer applying for a housing loan in India. Provide dummy details about your application and negotiate the terms."},
+#    {"role": "user", "content": "Hi!Welcome to Hero Housing Finance!"},
+#    {"role": "assistant", "content": "Hello, I would like to apply for a loan."},
+#]
 #outputs = pipe(
 #    messages,
 #    max_new_tokens=256,
 #)
 #print(outputs[0]["generated_text"][-1])
 
+system_message = "You are a Technical Support Assistant. Read the Context and generate only the summary of the answer to the Query based on your understanding of the <Question> <Answer> pairs in the context."
+
 def respond(
     message,
     history: list[tuple[str, str]],
@@ -63,7 +65,7 @@ For information on how to customize the ChatInterface, peruse the gradio docs: h
 demo = gr.ChatInterface(
     respond,
     additional_inputs=[
-        gr.Textbox(value="You are a
+        gr.Textbox(value="You are a Technical Support Assistant. Read the Context and generate only the summary of the answer to the Query based on your understanding of the <Question> <Answer> pairs in the context.", label="System message"),
         gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
         gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
         gr.Slider(
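The diff shows only the head of respond() and the inputs wired into gr.ChatInterface, not the function body. As a rough sketch of how those pieces typically fit together with the pipe pipeline above, assuming the truncated fourth slider controls top_p and that the model accepts chat-style message lists (the parameter names below are illustrative, not taken from this commit):

# Sketch only: the real respond() body is not part of this diff.
def respond(message, history, system_message, max_tokens, temperature, top_p):
    # Rebuild the conversation in the role/content format the pipeline expects.
    messages = [{"role": "system", "content": system_message}]
    for user_turn, assistant_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    outputs = pipe(
        messages,
        max_new_tokens=max_tokens,
        do_sample=True,
        temperature=temperature,
        top_p=top_p,  # assumed mapping for the fourth (truncated) slider
    )
    # The pipeline returns the whole chat; the last entry is the new assistant
    # turn, mirroring the commented-out print(outputs[0]["generated_text"][-1]).
    return outputs[0]["generated_text"][-1]["content"]

With gr.ChatInterface, the values of the additional_inputs are passed to respond() in order after message and history, which is why the Textbox default added in this commit becomes the default system prompt at runtime.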
|