ginipick committed on
Commit
77f834a
·
verified ·
1 Parent(s): 570779f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -6
app.py CHANGED
@@ -20,6 +20,7 @@ from unidecode import unidecode
20
 
21
  import re
22
  from langchain.schema import Document
 
23
 
24
  # API 클라이언트 초기화
25
  llm_client = InferenceClient("CohereForAI/c4ai-command-r-plus", token=os.getenv("HF_TOKEN"))
@@ -61,10 +62,11 @@ def long_text_result(file):
61
  # Vector DB와 관련 chain 설정
62
  global chat_chain
63
  memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
 
64
  retriever = create_history_aware_retriever(
65
  retriever=vector_db.as_retriever(),
66
  llm=llm_client,
67
- prompt="Based on the following document, please provide a relevant response:"
68
  )
69
  chat_chain = create_retrieval_chain(
70
  retriever=retriever,
@@ -228,8 +230,3 @@ with iface:
228
 
229
  if __name__ == "__main__":
230
  iface.launch()
231
-
232
-
233
-
234
-
235
-
 
20
 
21
  import re
22
  from langchain.schema import Document
23
+ from langchain.prompts import PromptTemplate
24
 
25
  # API 클라이언트 초기화
26
  llm_client = InferenceClient("CohereForAI/c4ai-command-r-plus", token=os.getenv("HF_TOKEN"))
 
62
  # Vector DB와 관련 chain 설정
63
  global chat_chain
64
  memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
65
+ prompt_template = PromptTemplate(template="Based on the following document, please provide a relevant response:", input_variables=["context"])
66
  retriever = create_history_aware_retriever(
67
  retriever=vector_db.as_retriever(),
68
  llm=llm_client,
69
+ prompt=prompt_template
70
  )
71
  chat_chain = create_retrieval_chain(
72
  retriever=retriever,
 
230
 
231
  if __name__ == "__main__":
232
  iface.launch()