Technologic101 committed
Commit 54e1b1e · 1 Parent(s): 0f77dce

task: fix chain context

Files changed (1)
  1. app.py +2 -3
app.py CHANGED
@@ -136,8 +136,7 @@ rag_prompt = PromptTemplate.from_template(RAG_PROMPT_TEMPLATE)
 """
 ### 1. CREATE HUGGINGFACE ENDPOINT FOR LLM
 hf_llm = HuggingFaceEndpoint(
-    endpoint_url=HF_LLM_ENDPOINT,
-    huggingface_api_token=HF_TOKEN
+    endpoint_url=HF_LLM_ENDPOINT
     #model_kwargs={"headers": {"Authorization": f"Bearer {HF_TOKEN}"}}
 )
 
@@ -164,7 +163,7 @@ async def start_chat():
     """
 
     ### BUILD LCEL RAG CHAIN THAT ONLY RETURNS TEXT
-    lcel_rag_chain = {"context": itemgetter("context") | hf_retriever, "query": RunnablePassthrough()} | rag_prompt | hf_llm
+    lcel_rag_chain = {"context": itemgetter("query") | hf_retriever, "query": RunnablePassthrough()} | rag_prompt | hf_llm
 
     cl.user_session.set("lcel_rag_chain", lcel_rag_chain)
 
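
The first hunk drops the huggingface_api_token keyword, which does not appear to match the parameter name the library expects, leaving the endpoint to authenticate from ambient credentials. A minimal sketch of the alternative, assuming the app uses langchain's HuggingFaceEndpoint (langchain_huggingface package) and reads the same environment variables as app.py; the kwarg name and fallback described below come from the library, not from this commit:

# Hedged sketch, not part of this commit: if explicit auth is still wanted,
# the kwarg langchain's HuggingFaceEndpoint exposes is huggingfacehub_api_token
# (note the spelling); otherwise the client falls back to ambient
# huggingface_hub credentials such as the HF_TOKEN environment variable.
import os
from langchain_huggingface import HuggingFaceEndpoint

HF_LLM_ENDPOINT = os.environ["HF_LLM_ENDPOINT"]   # assumed to be set, as in app.py
HF_TOKEN = os.environ.get("HF_TOKEN")             # optional if already logged in

hf_llm = HuggingFaceEndpoint(
    endpoint_url=HF_LLM_ENDPOINT,
    huggingfacehub_api_token=HF_TOKEN,
    task="text-generation",
    max_new_tokens=512,
)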
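
The second hunk is the actual "chain context" fix: the retriever needs the user's question, so the "context" branch now extracts the "query" key from the chain input and pipes it into hf_retriever, whereas the old itemgetter("context") looked up a key that is never present in the input dict. A minimal usage sketch, assuming hf_retriever, rag_prompt, and hf_llm are the objects built earlier in app.py:

# Hedged usage sketch (not in the commit); hf_retriever, rag_prompt and hf_llm
# are assumed to come from earlier in app.py.
from operator import itemgetter
from langchain_core.runnables import RunnablePassthrough

lcel_rag_chain = (
    {
        # pull the question out of the input dict and hand it to the retriever
        "context": itemgetter("query") | hf_retriever,
        # RunnablePassthrough() forwards the whole input mapping unchanged,
        # so the prompt's {query} slot receives the original input dict
        "query": RunnablePassthrough(),
    }
    | rag_prompt
    | hf_llm
)

# The chain is stored per Chainlit session; invoking it only needs the question.
answer = lcel_rag_chain.invoke({"query": "What does the source document cover?"})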