Spaces:
Runtime error
Runtime error
Commit
·
6ed9bdb
1
Parent(s):
ffdbcb0
Fix response declaration
Browse files- chains/openai_model.py +7 -5
chains/openai_model.py
CHANGED
@@ -6,7 +6,7 @@ from langchain.prompts import PromptTemplate
|
|
6 |
from config import TIMEOUT_STREAM
|
7 |
from vector_db import upload_file
|
8 |
from callback import StreamingGradioCallbackHandler
|
9 |
-
from queue import SimpleQueue, Empty
|
10 |
from threading import Thread
|
11 |
from utils import history_file_path, load_lasted_file_username, add_source_numbers, add_details
|
12 |
from chains.custom_chain import CustomConversationalRetrievalChain
|
@@ -183,20 +183,21 @@ class OpenAIModel:
|
|
183 |
yield chatbot, status_text
|
184 |
|
185 |
# Create a function to call - this will run in a thread
|
186 |
-
|
187 |
def task():
|
188 |
# Conversation + RetrievalChain
|
189 |
qa = CustomConversationalRetrievalChain.from_llm(llm, vectorstore.as_retriever(k=5),
|
190 |
-
condense_question_llm = condense_llm, verbose=True,
|
191 |
condense_question_prompt=condense_prompt,
|
192 |
combine_docs_chain_kwargs={"prompt": qa_prompt},
|
193 |
return_source_documents=True)
|
194 |
# query with input and chat history
|
195 |
response = qa({"question": inputs, "chat_history": self.history})
|
|
|
196 |
q.put(job_done)
|
197 |
|
198 |
thread = Thread(target=task)
|
199 |
-
thread.start()
|
200 |
chatbot.append((inputs, ""))
|
201 |
content = ""
|
202 |
while True:
|
@@ -209,8 +210,9 @@ class OpenAIModel:
|
|
209 |
yield chatbot, status_text
|
210 |
except Empty:
|
211 |
continue
|
212 |
-
|
213 |
# add citation info to response
|
|
|
214 |
relevant_docs = response["source_documents"]
|
215 |
reference_results = [d.page_content for d in relevant_docs]
|
216 |
display_append = add_details(reference_results)
|
|
|
6 |
from config import TIMEOUT_STREAM
|
7 |
from vector_db import upload_file
|
8 |
from callback import StreamingGradioCallbackHandler
|
9 |
+
from queue import SimpleQueue, Empty, Queue
|
10 |
from threading import Thread
|
11 |
from utils import history_file_path, load_lasted_file_username, add_source_numbers, add_details
|
12 |
from chains.custom_chain import CustomConversationalRetrievalChain
|
|
|
183 |
yield chatbot, status_text
|
184 |
|
185 |
# Create a function to call - this will run in a thread
|
186 |
+
response_queue = Queue()
|
187 |
def task():
|
188 |
# Conversation + RetrievalChain
|
189 |
qa = CustomConversationalRetrievalChain.from_llm(llm, vectorstore.as_retriever(k=5),
|
190 |
+
condense_question_llm = condense_llm, verbose=True,
|
191 |
condense_question_prompt=condense_prompt,
|
192 |
combine_docs_chain_kwargs={"prompt": qa_prompt},
|
193 |
return_source_documents=True)
|
194 |
# query with input and chat history
|
195 |
response = qa({"question": inputs, "chat_history": self.history})
|
196 |
+
response_queue.put(response)
|
197 |
q.put(job_done)
|
198 |
|
199 |
thread = Thread(target=task)
|
200 |
+
thread.start()
|
201 |
chatbot.append((inputs, ""))
|
202 |
content = ""
|
203 |
while True:
|
|
|
210 |
yield chatbot, status_text
|
211 |
except Empty:
|
212 |
continue
|
213 |
+
|
214 |
# add citation info to response
|
215 |
+
response = response_queue.get()
|
216 |
relevant_docs = response["source_documents"]
|
217 |
reference_results = [d.page_content for d in relevant_docs]
|
218 |
display_append = add_details(reference_results)
|