DrishtiSharma committed
Commit 3d18e56 · verified · 1 Parent(s): 9fd5d26

Update app1.py

Files changed (1)
  1. app1.py +30 -0
app1.py CHANGED
@@ -240,5 +240,35 @@ if query:
  """
  )
 
+    context_relevancy_evaluation_chain = LLMChain(llm=llm_judge, prompt=context_relevancy_checker_prompt, output_key="relevancy_response")
+
+    response_crisis = context_relevancy_evaluation_chain.invoke({"context": context, "retriever_query": query})
+
+    pick_relevant_context_chain = LLMChain(llm=llm_judge, prompt=relevant_prompt, output_key="context_number")
+
+    relevant_response = pick_relevant_context_chain.invoke({"relevancy_response": response_crisis['relevancy_response']})
+
+    relevant_contexts_chain = LLMChain(llm=llm_judge, prompt=context_prompt, output_key="relevant_contexts")
+
+    contexts = relevant_contexts_chain.invoke({"context_number": relevant_response['context_number'], "context": context})
+
+    response_chain = LLMChain(llm=rag_llm, prompt=final_prompt, output_key="final_response")
+
+    response = response_chain.invoke({"query": query, "context": contexts['relevant_contexts']})
+
+    # Orchestrate using SequentialChain
+    context_management_chain = SequentialChain(
+        chains=[context_relevancy_evaluation_chain, pick_relevant_context_chain, relevant_contexts_chain, response_chain],
+        input_variables=["context", "retriever_query", "query"],
+        output_variables=["relevancy_response", "context_number", "relevant_contexts", "final_response"]
+    )
+
+    final_output = context_management_chain({"context": context, "retriever_query": query, "query": query})
+
+
+
+
+
+
 
 
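For readers unfamiliar with the pattern this commit introduces, below is a minimal, self-contained sketch of chaining LLMChain steps through SequentialChain. The fake LLM, toy prompts, and names are illustrative assumptions standing in for the app's real llm_judge, rag_llm, and prompt templates; they are not part of this commit.

from langchain.chains import LLMChain, SequentialChain
from langchain.prompts import PromptTemplate
from langchain_community.llms.fake import FakeListLLM

# Fake LLM so the sketch runs without API keys; responses are returned in order.
fake_llm = FakeListLLM(responses=["Context 1 looks relevant.", "A final answer."])

# Step 1: judge relevancy of the retrieved context (stand-in for the judge chain).
relevancy_chain = LLMChain(
    llm=fake_llm,
    prompt=PromptTemplate.from_template(
        "Is the context below relevant to '{retriever_query}'?\n{context}"
    ),
    output_key="relevancy_response",
)

# Step 2: answer the query from the judged context (stand-in for the RAG chain).
answer_chain = LLMChain(
    llm=fake_llm,
    prompt=PromptTemplate.from_template(
        "Answer '{query}' using:\n{relevancy_response}"
    ),
    output_key="final_response",
)

# SequentialChain feeds each step's output_key into the next step's prompt variables.
pipeline = SequentialChain(
    chains=[relevancy_chain, answer_chain],
    input_variables=["context", "retriever_query", "query"],
    output_variables=["relevancy_response", "final_response"],
)

result = pipeline({"context": "Example context.", "retriever_query": "example", "query": "example"})
print(result["final_response"])

The commit wires its four chains the same way: llm_judge drives the three filtering steps, and rag_llm produces final_response from the filtered context.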