Aman committed on
Commit
bd9991a
·
verified ·
1 Parent(s): 2748af1

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +4 -4
README.md CHANGED
@@ -19,9 +19,9 @@ model = LLM(YOUR_MODEL_PATH, dtype="half")
19
  sampling_params = SamplingParams(temperature=0.0, top_p=1.0, max_tokens=100, skip_special_tokens=False)
20
 
21
  def format_prompt(input, paragraph=None):
22
- prompt = "### Instruction:\n{0}\n\n### Response:\n".format(input)
23
- if paragraph is not None:
24
- prompt += "[Retrieval]<paragraph>{0}</paragraph>".format(paragraph)
25
  return prompt
26
 
27
  query_1 = "你好呀"
@@ -30,7 +30,7 @@ queries = [query_1, query_2]
30
 
31
  preds = model.generate([format_prompt(query) for query in queries], sampling_params)
32
  for pred in preds:
33
- print("Model prediction: {0}".format(pred.outputs[0].text))
34
  # Model prediction: [No Retrieval] 你好!有什么我可以帮你解答的问题吗? [Utility:5] </s>
35
  # Model prediction: [Retrieval] <paragraph> ... (this query requires factual grounding, call a retriever) </paragraph> [Relevant] 太和殿、中和殿、保和殿 [Utility:5] </s>
36
  ```
 
19
  sampling_params = SamplingParams(temperature=0.0, top_p=1.0, max_tokens=100, skip_special_tokens=False)
20
 
21
  def format_prompt(input, paragraph=None):
22
+ prompt = "### Instruction:\n{0}\n\n### Response:\n".format(input)
23
+ if paragraph is not None:
24
+ prompt += "[Retrieval]<paragraph>{0}</paragraph>".format(paragraph)
25
  return prompt
26
 
27
  query_1 = "你好呀"
 
30
 
31
  preds = model.generate([format_prompt(query) for query in queries], sampling_params)
32
  for pred in preds:
33
+ print("Model prediction: {0}".format(pred.outputs[0].text))
34
  # Model prediction: [No Retrieval] 你好!有什么我可以帮你解答的问题吗? [Utility:5] </s>
35
  # Model prediction: [Retrieval] <paragraph> ... (this query requires factual grounding, call a retriever) </paragraph> [Relevant] 太和殿、中和殿、保和殿 [Utility:5] </s>
36
  ```