Update app.py
app.py CHANGED
@@ -151,8 +151,6 @@ def generate(prompt, history):
         output = llm(context, max_tokens=400, stop=["Nurse:"], echo=False)
         response = output["choices"][0]["text"]
         response = response.strip()
-        history2.append(("generation", response))
-        yield history


         # for output in llm(input, stream=True, max_tokens=100, ):
@@ -165,11 +163,11 @@ def generate(prompt, history):
         history.append((prompt, response))
         context += response
         print (context)
-
+        return history

     else:
         output = "Did you forget to enter your Details? Please go to the User Info Tab and Input your data. "
-
+        return output

 def predict(input, chatbot, max_length, top_p, temperature, history):
     chatbot.append((input, ""))