Damien Benveniste committed on
Commit
196de9f
·
1 Parent(s): 5d0492c

added streaming

Browse files
Files changed (2) hide show
  1. app/chains.py +2 -1
  2. app/prompts.py +0 -15
app/chains.py CHANGED
@@ -20,7 +20,8 @@ llm = HuggingFaceEndpoint(
20
  repo_id="meta-llama/Meta-Llama-3-8B-Instruct",
21
  huggingfacehub_api_token=os.environ['HF_TOKEN'],
22
  max_new_tokens=512,
23
- stop_sequences=[tokenizer.eos_token]
 
24
  )
25
 
26
  simple_chain = (raw_prompt | llm).with_types(input_type=schemas.UserQuestion)
 
20
  repo_id="meta-llama/Meta-Llama-3-8B-Instruct",
21
  huggingfacehub_api_token=os.environ['HF_TOKEN'],
22
  max_new_tokens=512,
23
+ stop_sequences=[tokenizer.eos_token],
24
+ streaming=True,
25
  )
26
 
27
  simple_chain = (raw_prompt | llm).with_types(input_type=schemas.UserQuestion)
app/prompts.py CHANGED
@@ -36,20 +36,6 @@ Answer the question based only on the following context:
36
  Question: {standalone_question}
37
  """
38
 
39
- map_prompt = """
40
- Given the following list of file paths, return a comma separated list of the most likely files to have content that could potentially help answer the question. Return nothing if none of those would help.
41
- Make sure to return the complete full paths as it is writen in the original list
42
-
43
- File list:
44
- {file_list}
45
-
46
- Question: {question}
47
-
48
- Return a comma separated list of files and nothing else!
49
- Comma separated list:
50
- """
51
-
52
-
53
 
54
  def format_prompt(prompt):
55
  chat = [
@@ -81,5 +67,4 @@ raw_prompt = PromptTemplate.from_template(raw_prompt)
81
  history_prompt_formatted = format_prompt(history_prompt)
82
  question_prompt_formatted = format_prompt(question_prompt)
83
  context_prompt_formatted = format_prompt(context_prompt)
84
- map_prompt_formatted = format_prompt(map_prompt)
85
 
 
36
  Question: {standalone_question}
37
  """
38
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
39
 
40
  def format_prompt(prompt):
41
  chat = [
 
67
  history_prompt_formatted = format_prompt(history_prompt)
68
  question_prompt_formatted = format_prompt(question_prompt)
69
  context_prompt_formatted = format_prompt(context_prompt)
 
70