Update app.py
app.py
CHANGED
@@ -14,9 +14,6 @@ from langchain.vectorstores import Chroma
 
 from langchain.chains import RetrievalQA
 
-from langchain.memory import ConversationBufferMemory
-from langchain.chains import ConversationChain
-
 def loading_pdf():
     return "Loading..."
 
@@ -30,11 +27,6 @@ def pdf_changes(pdf_doc):
     qa = RetrievalQA.from_chain_type(
         llm=OpenAI(temperature=0.5),
         chain_type="stuff",
-        conversation = ConversationChain(
-            llm=llm,
-            verbose=True,
-            memory=ConversationBufferMemory()
-        ),
         retriever=retriever,
         return_source_documents=True)
     return "Ready"
@@ -52,7 +44,7 @@ def infer(question):
 
     query = question
     result = qa({"query": query})
-    print(result)
+    #print(result)
     return result
 
 css="""
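Note: the removed block nested a ConversationChain(...) inside the argument list of RetrievalQA.from_chain_type(...), which is neither valid Python nor an accepted parameter of that constructor, so dropping it along with the now-unused memory imports is the straightforward fix. If conversational memory over the same retriever is still wanted, a minimal sketch using the legacy LangChain ConversationalRetrievalChain (matching the old-style imports in this file) could look like the following; `retriever` is assumed to be the Chroma retriever already built in pdf_changes(), and this sketch is not part of this commit.

# Sketch only (not part of this commit): conversational memory on top of the
# same retriever, using the legacy langchain ConversationalRetrievalChain API.
from langchain.llms import OpenAI
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationalRetrievalChain

memory = ConversationBufferMemory(
    memory_key="chat_history",   # key the chain reads previous turns from
    return_messages=True,
    output_key="answer",         # required when return_source_documents=True
)

qa = ConversationalRetrievalChain.from_llm(
    llm=OpenAI(temperature=0.5),
    retriever=retriever,         # assumed: retriever created in pdf_changes()
    memory=memory,
    return_source_documents=True,
)

# ConversationalRetrievalChain expects "question" rather than "query"
result = qa({"question": "What is this PDF about?"})
print(result["answer"])

Because the memory object accumulates "chat_history" across calls, infer() would then carry context from one question to the next instead of treating each query independently as RetrievalQA does.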