binh99 committed on
Commit 9a093dc · 1 Parent(s): 0310a05

Update app.py

Files changed (1)
  1. app.py +3 -4
app.py CHANGED
@@ -31,7 +31,7 @@ prompt_template = "Instructions: Compose a comprehensive reply to the query usin
     "with the same name, create separate answers for each. Only include information found in the results and " \
     "don't add any additional information. Make sure the answer is correct and don't output false content. " \
     "Ignore outlier search results which has nothing to do with the question. Only answer what is asked. " \
-    "The answer should be short and concise. Reply in {reply_language}. \n\nQuery: {question}\nAnswer: "
+    "The answer should be short and concise. \n\nQuery: {question}\nAnswer: "
 
 # MODELS = ["universal-sentence-encoder", "instructor-large"]
 MODELS = ["text-davinci-001", "text-davinci-002", "text-davinci-003"]
@@ -150,11 +150,10 @@ def predict(
         topn_chunks = [chunks[i] for i in index_top_chunks]
         prompt = ""
         prompt += 'search results:\n\n'
-        prompt_lang = prompt_template.replace("{reply_language}", lang)
         for c in topn_chunks:
             prompt += c + '\n\n'
-        prompt += prompt_lang
-        prompt += f"Query: {inputs}\nAnswer:"
+        prompt += prompt_template
+        prompt += f"Query: {inputs}. Reply in {lang}\nAnswer:"
         inputs = prompt
         reference_results = add_source_numbers(topn_chunks)
         display_reference = add_details(reference_results)
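
For context, a minimal sketch of how the prompt is assembled after this commit. The assembly logic mirrors the diff above; the sample values for topn_chunks, inputs and lang are illustrative only, and prompt_template is abbreviated rather than copied in full from app.py.

# Sketch of the prompt assembly after this commit (not the full app.py).
# topn_chunks, inputs and lang come from predict() in the real file; the
# sample values below are made up, and prompt_template is abbreviated.
prompt_template = "Instructions: Compose a comprehensive reply to the query using the search results given. " \
                  "The answer should be short and concise. \n\nQuery: {question}\nAnswer: "

topn_chunks = ["[1] first retrieved chunk ...", "[2] second retrieved chunk ..."]  # illustrative
inputs = "What does the document say about X?"  # user query (illustrative)
lang = "English"                                # reply language (illustrative)

prompt = ""
prompt += 'search results:\n\n'
for c in topn_chunks:
    prompt += c + '\n\n'
prompt += prompt_template                               # template appended verbatim; the language clause was removed from it
prompt += f"Query: {inputs}. Reply in {lang}\nAnswer:"  # the reply language is now requested alongside the query instead
inputs = prompt
print(inputs)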