Kathirsci committed (verified) · Commit a002c70 · 1 parent: 526dcd3

Update app.py

Files changed (1):
  1. app.py +2 -1
app.py CHANGED

@@ -25,7 +25,8 @@ QA_CHAIN_PROMPT = PromptTemplate(input_variables=["context", "question"],templat
 def predict(message, history):
     input_prompt = QA_CHAIN_PROMPT.format(question=message, context=history)
     result = llm.generate([input_prompt])
-    ai_msg = result[0].generations[0].text
+    print(result)  # Add this line to inspect the result
+    # ai_msg = ... (update this line based on the result structure)
     return ai_msg

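For context on the follow-up this commit is preparing: if `llm` is a LangChain-style LLM (the app's imports are not shown in this diff, so this is an assumption), `generate()` returns an `LLMResult`, and the generated text sits one list deeper than the removed `result[0].generations[0].text` indexing assumed. A minimal sketch of that extraction, using `FakeListLLM` purely as a stand-in for the app's real model:

```python
# Sketch only (assumption): llm.generate() returns a LangChain LLMResult.
# FakeListLLM is a hypothetical stand-in for the app's actual model.
from langchain_community.llms import FakeListLLM

llm = FakeListLLM(responses=["Hello from the model"])

result = llm.generate(["What does the context say?"])
print(result)  # inspect the structure first, as the commit does

# LLMResult.generations is a list with one entry per prompt, each entry
# being a list of Generation objects, so the text is nested two levels deep.
ai_msg = result.generations[0][0].text
print(ai_msg)
```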