pankajsingh3012 committed
Commit deff711 · verified · 1 Parent(s): 4274425

Update app.py

Files changed (1): app.py +2 -2
app.py CHANGED
@@ -108,14 +108,14 @@ if query and st.session_state.documents_loaded:
     llm = GoogleGenerativeAI(model='gemini-1.0-pro', google_api_key="AIzaSyC1AvHnvobbycU8XSCXh-gRq3DUfG0EP98")
 
     # Create a PromptTemplate for the QA chain
-    qa_prompt = PromptTemplate(template="Answer the following question based on the context provided:\n\n{context}\n\nQuestion: {question}\nAnswer:", input_variables=["context", "question"])
+    qa_prompt = PromptTemplate(template="Answer the following question based on the context provided:\n\nContext: {context}\n\nQuestion: {question}\n\nAnswer:", input_variables=["context", "question"])
 
     # Create the retrieval QA chain
     qa_chain = RetrievalQA.from_chain_type(
         retriever=st.session_state.vector_store.as_retriever(),
         chain_type="map_reduce",
         llm=llm,
-        prompt=qa_prompt
+        chain_type_kwargs={"prompt": qa_prompt}
     )
 
     response = qa_chain({"question": query})
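
For context, below is a minimal, self-contained sketch of the pattern the commit lands on: passing a custom PromptTemplate to RetrievalQA through chain_type_kwargs. It is an illustration, not the app's code. It uses the "stuff" chain type (which accepts a single "prompt" key; the app's "map_reduce" type expects its own prompt keys), builds a throwaway FAISS index in place of the app's session-state vector store, and reads the API key from a GOOGLE_API_KEY environment variable instead of hardcoding it.

# Minimal sketch (not the committed app code): wiring a custom prompt into a
# LangChain RetrievalQA chain via chain_type_kwargs. Assumes the classic
# (pre-LCEL) API and the packages langchain, langchain-community,
# langchain-google-genai, and faiss-cpu; the sample texts and the
# GOOGLE_API_KEY environment variable are stand-ins.
import os

from langchain.chains import RetrievalQA
from langchain.prompts import PromptTemplate
from langchain_community.vectorstores import FAISS
from langchain_google_genai import GoogleGenerativeAI, GoogleGenerativeAIEmbeddings

# Throwaway index standing in for the app's st.session_state.vector_store.
embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
vector_store = FAISS.from_texts(
    [
        "RetrievalQA retrieves relevant chunks before asking the LLM.",
        "Custom prompts are passed through chain_type_kwargs.",
    ],
    embedding=embeddings,
)

llm = GoogleGenerativeAI(
    model="gemini-1.0-pro",
    google_api_key=os.environ["GOOGLE_API_KEY"],  # keep keys out of source
)

qa_prompt = PromptTemplate(
    template=(
        "Answer the following question based on the context provided:\n\n"
        "Context: {context}\n\nQuestion: {question}\n\nAnswer:"
    ),
    input_variables=["context", "question"],
)

# The "stuff" chain type accepts a single "prompt" key in chain_type_kwargs.
qa_chain = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="stuff",
    retriever=vector_store.as_retriever(),
    chain_type_kwargs={"prompt": qa_prompt},
)

# RetrievalQA expects its input under the "query" key and returns "result".
response = qa_chain({"query": "How are custom prompts supplied to RetrievalQA?"})
print(response["result"])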