AFischer1985 committed on
Commit
489295a
·
verified ·
1 Parent(s): 60b819a

Update run.py

Browse files
Files changed (1) hide show
  1. run.py +9 -9
run.py CHANGED
@@ -246,19 +246,19 @@ def response(message, history,customSysPrompt,settings):
246
 
247
  print("AI running on prem!" if(onPrem) else "AI running HFHub!")
248
  if(onPrem==False):
249
- temperature=float(0.9)
250
- max_new_tokens=500
251
- top_p=0.95
252
- repetition_penalty=1.0
253
- if temperature < 1e-2: temperature = 1e-2
254
- top_p = float(top_p)
255
- generate_kwargs = dict(
256
  #temperature=temperature,
257
- max_new_tokens=max_new_tokens,
258
  #top_p=top_p,
259
  #repetition_penalty=repetition_penalty,
260
  #do_sample=True,
261
- seed=42,
262
  )
263
  stream = client.text_generation(prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
264
  response = ""
 
246
 
247
  print("AI running on prem!" if(onPrem) else "AI running HFHub!")
248
  if(onPrem==False):
249
+ generate_kwargs = dict( #https://github.com/huggingface/chat-ui/blob/main/.env.template
250
+ temperature=0.6,
251
+ top_p=0.95,
252
+ repetition_penalty=1.2,
253
+ top_k=50,
254
+ truncate=24576,
255
+ max_new_tokens=8192
256
  #temperature=temperature,
257
+ #max_new_tokens=max_new_tokens,
258
  #top_p=top_p,
259
  #repetition_penalty=repetition_penalty,
260
  #do_sample=True,
261
+ #seed=42,
262
  )
263
  stream = client.text_generation(prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
264
  response = ""