ECUiVADE committed (verified)
Commit ed71d13 · Parent: 2d91a26

Update app.py

Files changed (1): app.py +3 -26
app.py CHANGED
@@ -59,7 +59,7 @@ Age=""
 chat_log_name =""
 
 from llama_cpp import Llama
-llm = Llama(model_path=model_file, model_type="mistral")
+llm = Llama(model_path=model_file, model_type="mistral",n_ctx = 2048)
 
 def get_drive_service():
     credentials = service_account.Credentials.from_service_account_file(
@@ -82,29 +82,6 @@ def search_file():
     return files
 
 
-def format_prompt(message, history):
-
-    global isFirstRun
-
-    if not isFirstRun:
-        print("reg prompt")
-        prompt = "<s>"
-        for i, (user_prompt,bot_response) in enumerate(chat_history):
-            if i == 0:
-                prompt += f"[INST]{user_prompt}[/INST]"
-            else:
-                prompt += f"Nurse : {user_prompt}"
-            prompt += f" Barry: {bot_response}"
-        prompt += f"Nurse: {message} Barry:</s>"
-
-    else:
-        prompt = "<s>"
-        isFirstRun = False
-        prompt += f"[INST] {message} [/INST] Barry:</s>"
-        print("init prompt")
-
-    return prompt
-
 def strip_special_tokens(text):
     # List of special tokens to be removed
     special_tokens = ["</s>", "<s>", "[INST]", "[/INST]"]
@@ -162,9 +139,9 @@ def generate(prompt, history):
 
     context += """
     <|im_start|>nurse
-    Nurse: """+prompt+"""
+    """+prompt+"""
     <|im_start|>barry
-    Barry:
+
     """
 
     response = ""
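For context, below is a minimal, self-contained sketch of what the changed constructor call does, written against the llama-cpp-python API that app.py imports. Only Llama, model_path, and n_ctx=2048 come from the diff itself; the GGUF file path and the prompt text are hypothetical stand-ins for illustration.

# Minimal sketch (not the app's actual code): raising n_ctx to 2048 enlarges
# the context window beyond llama-cpp-python's small default, so longer
# nurse/Barry chat histories fit into a single prompt without truncation.
from llama_cpp import Llama

model_file = "./mistral-7b-instruct.Q4_K_M.gguf"  # hypothetical local GGUF path

llm = Llama(model_path=model_file, n_ctx=2048)

# A ChatML-style context like the one generate() builds after this commit:
context = """<|im_start|>nurse
Hello Barry, how are you feeling today?
<|im_start|>barry
"""

# Stream the completion token by token, stopping at the next role marker.
for chunk in llm(context, max_tokens=128, stop=["<|im_start|>"], stream=True):
    print(chunk["choices"][0]["text"], end="", flush=True)
print()

Note that n_ctx only raises the window the model may attend over; the caller still has to keep the accumulated chat history under that limit when building the prompt.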